/* $Id: IEMAllInstOneByte.cpp.h 104018 2024-03-24 00:14:18Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */

/* Instruction group definitions: */

/** @defgroup og_gen General
 * @{ */
 /** @defgroup og_gen_arith Arithmetic
  * @{ */
  /** @defgroup og_gen_arith_bin Binary numbers */
  /** @defgroup og_gen_arith_dec Decimal numbers */
  /** @} */
/** @} */

/** @defgroup og_stack Stack
 * @{ */
 /** @defgroup og_stack_sreg Segment registers */
/** @} */

/** @defgroup og_prefix Prefixes */
/** @defgroup og_escapes Escape bytes */



/** @name One byte opcodes.
 * @{
 */

/**
 * Special case body for byte instructions like SUB and XOR that can be used
 * to zero a register.
 *
 * This can be used both for the r8_rm and rm_r8 forms since it's working on
 * the same register.
 */
#define IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(a_bRm) \
    if (   (a_bRm >> X86_MODRM_REG_SHIFT) == ((a_bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT)) \
        && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB) \
    { \
        IEM_MC_BEGIN(0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_REG(pVCpu, a_bRm), 0); \
        IEM_MC_LOCAL_EFLAGS(fEFlags); \
        IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS); \
        IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF); \
        IEM_MC_COMMIT_EFLAGS(fEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } ((void)0)
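
/*
 * Worked example of the check above: for 'xor bl, bl' the ModR/M byte is
 * 0xdb (mod=3, reg=3, rm=3).  The left-hand side is then 0xdb >> 3 = 0x1b
 * and the right-hand side (0xdb & 7) | (3 << 3) = 0x1b as well, i.e. the
 * test is true exactly when mod is 3 (register mode) and reg equals rm.
 * The uRexReg == uRexB comparison additionally makes sure a REX prefix
 * doesn't extend the two fields to different registers.
 */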

/**
 * Body for instructions like ADD, AND, OR, ++ with a byte memory/register
 * as the destination (TEST and CMP use the read-only _RO variant below).
 */
#define IEMOP_BODY_BINARY_rm_r8_RW(a_bRm, a_InsNm, a_fRegRegNativeArchs, a_fMemRegNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        IEM_MC_BEGIN(0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
        IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
            IEM_MC_LOCAL_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, a_bRm), u8Dst); \
            IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG(uint8_t, u8Src, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG(uint8_t, u8Src, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bMapInfoDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bMapInfoDst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8_locked), pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bMapInfoDst); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
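
/*
 * Usage sketch for the body above, mirroring iemOp_add_Eb_Gb further down:
 * the handler decodes the ModR/M byte and passes it on together with the
 * instruction name that RT_CONCAT3 turns into the iemAImpl_ and
 * iemNativeEmit_ symbol names:
 *
 *     FNIEMOP_DEF(iemOp_add_Eb_Gb)
 *     {
 *         IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
 *         uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 *         IEMOP_BODY_BINARY_rm_r8_RW(bRm, add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
 *     }
 */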

/**
 * Body for instructions like TEST & CMP with a byte memory/register as
 * operands.
 */
#define IEMOP_BODY_BINARY_rm_r8_RO(a_bRm, a_fnNormalU8, a_EmitterBasename, a_fNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        IEM_MC_BEGIN(0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
            IEM_MC_LOCAL_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(0, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_NATIVE_IF(0) { \
                IEM_MC_LOCAL(uint8_t, u8Dst); \
                IEM_MC_FETCH_MEM_U8(u8Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, u8SrcEmit); \
                IEM_MC_FETCH_GREG_U8(u8SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_LOCAL_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u8Dst, u8SrcEmit, uEFlags, 8); \
                IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
                IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_ARG(uint8_t, u8Src, 1); \
                IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /** @todo we should probably decode the address first. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0

/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination.
 */
#define IEMOP_BODY_BINARY_r8_rm(a_bRm, a_InsNm, a_fNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        IEM_MC_BEGIN(0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_LOCAL_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, a_bRm), u8Dst); \
            IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        IEM_MC_BEGIN(0, 0); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_LOCAL_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, a_bRm), u8Dst); \
            IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0

/**
 * Body for byte instruction CMP with a register as the destination.
 */
#define IEMOP_BODY_BINARY_r8_rm_RO(a_bRm, a_InsNm, a_fNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        IEM_MC_BEGIN(0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_LOCAL_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        IEM_MC_BEGIN(0, 0); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_LOCAL_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0


/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 */
#define IEMOP_BODY_BINARY_rm_rv_RW(a_bRm, a_InsNm, a_fRegRegNativeArchs, a_fMemRegNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                    IEM_MC_LOCAL(uint16_t, u16Dst); \
                    IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_LOCAL_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, a_bRm), u16Dst); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                    IEM_MC_LOCAL(uint32_t, u32Dst); \
                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_LOCAL_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, a_bRm), u32Dst); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                    IEM_MC_LOCAL(uint64_t, u64Dst); \
                    IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_LOCAL_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, a_bRm), u64Dst); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_bRm, a_InsNm) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16_locked), pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32_locked), pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo /* CMP,TEST */); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64_locked), pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
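
/*
 * Note: IEMOP_BODY_BINARY_rm_rv_RW deliberately ends inside the still-open
 * lock-prefix else branch, and IEMOP_BODY_BINARY_rm_rv_LOCKED supplies the
 * locked variant plus the closing braces, so the two must always be used
 * back to back.  Sketch of the pairing, exactly as iemOp_add_Ev_Gv below
 * does it:
 *
 *     uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 *     IEMOP_BODY_BINARY_rm_rv_RW(    bRm, add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
 *     IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, add);
 */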

/**
 * Body for read-only word/dword/qword instructions like TEST and CMP with
 * memory/register as the destination.
 */
#define IEMOP_BODY_BINARY_rm_rv_RO(a_bRm, a_InsNm, a_fNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint16_t, u16Dst); \
                    IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_LOCAL_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint32_t, u32Dst); \
                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_LOCAL_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint64_t, u64Dst); \
                    IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_LOCAL_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
                    IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint16_t, u16Dst); \
                        IEM_MC_FETCH_MEM_U16(u16Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint16_t, u16SrcEmit); \
                        IEM_MC_FETCH_GREG_U16(u16SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_LOCAL_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u16Dst, u16SrcEmit, uEFlags, 16); \
                        IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                        IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG(uint16_t, u16Src, 1); \
                        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                        IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(EFlags); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint32_t, u32Dst); \
                        IEM_MC_FETCH_MEM_U32(u32Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint32_t, u32SrcEmit); \
                        IEM_MC_FETCH_GREG_U32(u32SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_LOCAL_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u32Dst, u32SrcEmit, uEFlags, 32); \
                        IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                        IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG(uint32_t, u32Src, 1); \
                        IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                        IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(EFlags); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint64_t, u64Dst); \
                        IEM_MC_FETCH_MEM_U64(u64Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint64_t, u64SrcEmit); \
                        IEM_MC_FETCH_GREG_U64(u64SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_LOCAL_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u64Dst, u64SrcEmit, uEFlags, 64); \
                        IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                        IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG(uint64_t, u64Src, 1); \
                        IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                        IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(EFlags); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0


/**
 * Body for instructions like ADD, AND, OR, ++ working on AL with a byte
 * immediate.
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_InsNm, a_fNativeArchs) \
    IEM_MC_BEGIN(0, 0); \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
        IEM_MC_LOCAL(uint8_t, u8Dst); \
        IEM_MC_FETCH_GREG_U8(u8Dst, X86_GREG_xAX); \
        IEM_MC_LOCAL(uint32_t, uEFlags); \
        IEM_MC_FETCH_EFLAGS(uEFlags); \
        IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u8Dst, u8Imm, uEFlags, 8, 8); \
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Dst); \
        IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
    } IEM_MC_NATIVE_ELSE() { \
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
    } IEM_MC_NATIVE_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
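
/*
 * Usage sketch, matching iemOp_add_Al_Ib further down.  The body macro
 * fetches the immediate itself and ends with IEM_MC_END() without a
 * trailing semicolon, so the invocation supplies it:
 *
 *     FNIEMOP_DEF(iemOp_add_Al_Ib)
 *     {
 *         IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
 *         IEMOP_BODY_BINARY_AL_Ib(add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
 *     }
 */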

/**
 * Body for instructions like ADD, AND, OR, ++ working on AX/EAX/RAX with a
 * word/dword immediate.
 */
#define IEMOP_BODY_BINARY_rAX_Iz_RW(a_InsNm, a_fNativeArchs) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            IEM_MC_BEGIN(0, 0); \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                IEM_MC_LOCAL(uint16_t, u16Dst); \
                IEM_MC_FETCH_GREG_U16(u16Dst, X86_GREG_xAX); \
                IEM_MC_LOCAL(uint32_t, uEFlags); \
                IEM_MC_FETCH_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Dst); \
                IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                IEM_MC_LOCAL(uint32_t, u32Dst); \
                IEM_MC_FETCH_GREG_U32(u32Dst, X86_GREG_xAX); \
                IEM_MC_LOCAL(uint32_t, uEFlags); \
                IEM_MC_FETCH_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Dst); \
                IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                IEM_MC_LOCAL(uint64_t, u64Dst); \
                IEM_MC_FETCH_GREG_U64(u64Dst, X86_GREG_xAX); \
                IEM_MC_LOCAL(uint32_t, uEFlags); \
                IEM_MC_FETCH_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Dst); \
                IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
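
/*
 * A note on the 64-bit case above: Iz is at most 32 bits wide, so it is
 * fetched with IEM_OPCODE_GET_NEXT_S32_SX_U64 and sign-extended to 64 bits
 * as the architecture prescribes - an imm32 of 0x80000000 is applied to RAX
 * as 0xffffffff80000000.  That is also why the native emitter is handed the
 * width pair 64, 32 rather than 64, 64.
 */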

/**
 * Body for the instructions CMP and TEST working on AX/EAX/RAX with a
 * word/dword immediate.
 */
#define IEMOP_BODY_BINARY_rAX_Iz_RO(a_InsNm, a_fNativeArchs) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            IEM_MC_BEGIN(0, 0); \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                IEM_MC_LOCAL(uint16_t, u16Dst); \
                IEM_MC_FETCH_GREG_U16(u16Dst, X86_GREG_xAX); \
                IEM_MC_LOCAL(uint32_t, uEFlags); \
                IEM_MC_FETCH_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
                IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1); \
                IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                IEM_MC_REF_GREG_U16_CONST(pu16Dst, X86_GREG_xAX); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                IEM_MC_LOCAL(uint32_t, u32Dst); \
                IEM_MC_FETCH_GREG_U32(u32Dst, X86_GREG_xAX); \
                IEM_MC_LOCAL(uint32_t, uEFlags); \
                IEM_MC_FETCH_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
                IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1); \
                IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                IEM_MC_REF_GREG_U32_CONST(pu32Dst, X86_GREG_xAX); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                IEM_MC_LOCAL(uint64_t, u64Dst); \
                IEM_MC_FETCH_GREG_U64(u64Dst, X86_GREG_xAX); \
                IEM_MC_LOCAL(uint32_t, uEFlags); \
                IEM_MC_FETCH_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
                IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1); \
                IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                IEM_MC_REF_GREG_U64_CONST(pu64Dst, X86_GREG_xAX); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0



/* Instruction specification format - work in progress: */

/**
 * @opcode 0x00
 * @opmnemonic add
 * @op1 rm:Eb
 * @op2 reg:Gb
 * @opmaps one
 * @openc ModR/M
 * @opflclass arithmetic
 * @ophints harmless ignores_op_sizes
 * @opstats add_Eb_Gb
 * @opgroup og_gen_arith_bin
 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_r8_RW(bRm, add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}


/**
 * @opcode 0x01
 * @opgroup og_gen_arith_bin
 * @opflclass arithmetic
 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_rv_RW(    bRm, add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, add);
}


/**
 * @opcode 0x02
 * @opgroup og_gen_arith_bin
 * @opflclass arithmetic
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_r8_rm(bRm, add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/**
 * @opcode 0x03
 * @opgroup og_gen_arith_bin
 * @opflclass arithmetic
 * @opcopytests iemOp_add_Ev_Gv
 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 0, add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/**
 * @opcode 0x04
 * @opgroup og_gen_arith_bin
 * @opflclass arithmetic
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/**
 * @opcode 0x05
 * @opgroup og_gen_arith_bin
 * @opflclass arithmetic
 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz_RW(add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/**
 * @opcode 0x06
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}


/**
 * @opcode 0x07
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES),
                                iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}


/**
 * @opcode 0x08
 * @opgroup og_gen_arith_bin
 * @opflclass logical
 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_r8_RW(bRm, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}


/**
 * @opcode 0x09
 * @opgroup og_gen_arith_bin
 * @opflclass logical
 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @note AF is documented as undefined, but both modern AMD and Intel CPUs clear it.
 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_rv_RW(    bRm, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, or);
}


/**
 * @opcode 0x0a
 * @opgroup og_gen_arith_bin
 * @opflclass logical
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_r8_rm(bRm, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/**
 * @opcode 0x0b
 * @opgroup og_gen_arith_bin
 * @opflclass logical
 * @opcopytests iemOp_or_Ev_Gv
 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 0, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/**
 * @opcode 0x0c
 * @opgroup og_gen_arith_bin
 * @opflclass logical
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/**
 * @opcode 0x0d
 * @opgroup og_gen_arith_bin
 * @opflclass logical
 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz_RW(or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}


/**
 * @opcode 0x0e
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}


/**
 * @opcode 0x0f
 * @opmnemonic EscTwo0f
 * @openc two0f
 * @opdisenum OP_2B_ESC
 * @ophints harmless
 * @opgroup og_escapes
 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_CS),
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
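
/*
 * Illustration of the two-byte map indexing above: g_apfnTwoByteMap holds
 * four entries per opcode byte, selected via pVCpu->iem.s.idxPrefix by the
 * mandatory-prefix state of the instruction.  The disabled sanity check in
 * the function shows this for 0x0f 0xbc: three of the four slots decode as
 * BSF, while slot 2 decodes as TZCNT, its F3-prefixed form.
 */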
1268
1269/**
1270 * @opcode 0x10
1271 * @opgroup og_gen_arith_bin
1272 * @opflclass arithmetic_carry
1273 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1274 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1275 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1276 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1277 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1278 */
1279FNIEMOP_DEF(iemOp_adc_Eb_Gb)
1280{
1281 IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
1282 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1283 IEMOP_BODY_BINARY_rm_r8_RW(bRm, adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1284}
1285
1286
1287/**
1288 * @opcode 0x11
1289 * @opgroup og_gen_arith_bin
1290 * @opflclass arithmetic_carry
1291 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1292 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1293 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1294 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1295 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1296 */
1297FNIEMOP_DEF(iemOp_adc_Ev_Gv)
1298{
1299 IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
1300 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1301 IEMOP_BODY_BINARY_rm_rv_RW( bRm, adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1302 IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, adc);
1303}
1304
1305
1306/**
1307 * @opcode 0x12
1308 * @opgroup og_gen_arith_bin
1309 * @opflclass arithmetic_carry
1310 * @opcopytests iemOp_adc_Eb_Gb
1311 */
1312FNIEMOP_DEF(iemOp_adc_Gb_Eb)
1313{
1314 IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1315 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1316 IEMOP_BODY_BINARY_r8_rm(bRm, adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1317}
1318
1319
1320/**
1321 * @opcode 0x13
1322 * @opgroup og_gen_arith_bin
1323 * @opflclass arithmetic_carry
1324 * @opcopytests iemOp_adc_Ev_Gv
1325 */
1326FNIEMOP_DEF(iemOp_adc_Gv_Ev)
1327{
1328 IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
1329 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1330 IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 0, adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1331}
1332
1333
1334/**
1335 * @opcode 0x14
1336 * @opgroup og_gen_arith_bin
1337 * @opflclass arithmetic_carry
1338 * @opcopytests iemOp_adc_Eb_Gb
1339 */
1340FNIEMOP_DEF(iemOp_adc_Al_Ib)
1341{
1342 IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1343 IEMOP_BODY_BINARY_AL_Ib(adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1344}
1345
1346
1347/**
1348 * @opcode 0x15
1349 * @opgroup og_gen_arith_bin
1350 * @opflclass arithmetic_carry
1351 * @opcopytests iemOp_adc_Ev_Gv
1352 */
1353FNIEMOP_DEF(iemOp_adc_eAX_Iz)
1354{
1355 IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
1356 IEMOP_BODY_BINARY_rAX_Iz_RW(adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1357}
1358
1359
1360/**
1361 * @opcode 0x16
1362 */
1363FNIEMOP_DEF(iemOp_push_SS)
1364{
1365 IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
1366 IEMOP_HLP_NO_64BIT();
1367 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
1368}
1369
1370
1371/**
1372 * @opcode 0x17
1373 */
1374FNIEMOP_DEF(iemOp_pop_SS)
1375{
1376 IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
1377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1378 IEMOP_HLP_NO_64BIT();
1379 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
1380 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
1381 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_SS)
1382 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_SS)
1383 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_SS)
1384 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_SS),
1385 iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
1386}
1387
1388
1389/**
1390 * @opcode 0x18
1391 * @opgroup og_gen_arith_bin
1392 * @opflclass arithmetic_carry
1393 */
1394FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
1395{
1396 IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
1397 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1398 IEMOP_BODY_BINARY_rm_r8_RW(bRm, sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1399}
1400
1401
1402/**
1403 * @opcode 0x19
1404 * @opgroup og_gen_arith_bin
1405 * @opflclass arithmetic_carry
1406 */
1407FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
1408{
1409 IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
1410 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1411 IEMOP_BODY_BINARY_rm_rv_RW( bRm, sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1412 IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, sbb);
1413}
1414
1415
1416/**
1417 * @opcode 0x1a
1418 * @opgroup og_gen_arith_bin
1419 * @opflclass arithmetic_carry
1420 */
1421FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
1422{
1423 IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1424 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1425 IEMOP_BODY_BINARY_r8_rm(bRm, sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1426}
1427
1428
1429/**
1430 * @opcode 0x1b
1431 * @opgroup og_gen_arith_bin
1432 * @opflclass arithmetic_carry
1433 */
1434FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
1435{
1436 IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
1437 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1438 IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 0, sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1439}
1440
1441
1442/**
1443 * @opcode 0x1c
1444 * @opgroup og_gen_arith_bin
1445 * @opflclass arithmetic_carry
1446 */
1447FNIEMOP_DEF(iemOp_sbb_Al_Ib)
1448{
1449 IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1450 IEMOP_BODY_BINARY_AL_Ib(sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1451}
1452
1453
1454/**
1455 * @opcode 0x1d
1456 * @opgroup og_gen_arith_bin
1457 * @opflclass arithmetic_carry
1458 */
1459FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
1460{
1461 IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
1462 IEMOP_BODY_BINARY_rAX_Iz_RW(sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1463}
1464
1465
1466/**
1467 * @opcode 0x1e
1468 * @opgroup og_stack_sreg
1469 */
1470FNIEMOP_DEF(iemOp_push_DS)
1471{
1472 IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
1473 IEMOP_HLP_NO_64BIT();
1474 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
1475}
1476
1477
1478/**
1479 * @opcode 0x1f
1480 * @opgroup og_stack_sreg
1481 */
1482FNIEMOP_DEF(iemOp_pop_DS)
1483{
1484 IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
1485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1486 IEMOP_HLP_NO_64BIT();
1487 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
1488 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
1489 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
1490 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
1491 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
1492 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS),
1493 iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
1494}
1495
1496
1497/**
1498 * @opcode 0x20
1499 * @opgroup og_gen_arith_bin
1500 * @opflclass logical
1501 */
1502FNIEMOP_DEF(iemOp_and_Eb_Gb)
1503{
1504 IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
1505 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1506 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1507 IEMOP_BODY_BINARY_rm_r8_RW(bRm, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1508}
1509
1510
1511/**
1512 * @opcode 0x21
1513 * @opgroup og_gen_arith_bin
1514 * @opflclass logical
1515 */
1516FNIEMOP_DEF(iemOp_and_Ev_Gv)
1517{
1518 IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
1519 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1520 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1521 IEMOP_BODY_BINARY_rm_rv_RW( bRm, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1522 IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, and);
1523}
1524
1525
1526/**
1527 * @opcode 0x22
1528 * @opgroup og_gen_arith_bin
1529 * @opflclass logical
1530 */
1531FNIEMOP_DEF(iemOp_and_Gb_Eb)
1532{
1533 IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1534 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1535 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1536 IEMOP_BODY_BINARY_r8_rm(bRm, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1537}
1538
1539
1540/**
1541 * @opcode 0x23
1542 * @opgroup og_gen_arith_bin
1543 * @opflclass logical
1544 */
1545FNIEMOP_DEF(iemOp_and_Gv_Ev)
1546{
1547 IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
1548 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1549 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1550 IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 0, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1551}
1552
1553
1554/**
1555 * @opcode 0x24
1556 * @opgroup og_gen_arith_bin
1557 * @opflclass logical
1558 */
1559FNIEMOP_DEF(iemOp_and_Al_Ib)
1560{
1561 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1562 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1563 IEMOP_BODY_BINARY_AL_Ib(and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1564}
1565
1566
1567/**
1568 * @opcode 0x25
1569 * @opgroup og_gen_arith_bin
1570 * @opflclass logical
1571 */
1572FNIEMOP_DEF(iemOp_and_eAX_Iz)
1573{
1574 IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
1575 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1576 IEMOP_BODY_BINARY_rAX_Iz_RW(and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1577}
1578
1579
1580/**
1581 * @opcode 0x26
1582 * @opmnemonic SEG
1583 * @op1 ES
1584 * @opgroup og_prefix
1585 * @openc prefix
1586 * @opdisenum OP_SEG
1587 * @ophints harmless
1588 */
1589FNIEMOP_DEF(iemOp_seg_ES)
1590{
1591 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
1592 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
1593 pVCpu->iem.s.iEffSeg = X86_SREG_ES;
1594
1595 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1596 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1597}
1598
1599
1600/**
1601 * @opcode 0x27
1602 * @opfltest af,cf
1603 * @opflmodify cf,pf,af,zf,sf,of
1604 * @opflundef of
1605 */
1606FNIEMOP_DEF(iemOp_daa)
1607{
1608 IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
1609 IEMOP_HLP_NO_64BIT();
1610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1611 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
1612 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_daa);
1613}
1614
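/* Rough sketch of the architectural DAA algorithm implemented by
   iemCImpl_daa (for reference only, simplified):
       if ((AL & 0xf) > 9 || AF) { AL += 6; AF = 1; }
       if (oldAL > 0x99 || oldCF) { AL += 0x60; CF = 1; } else CF = 0;
   with SF/ZF/PF set from the final AL and OF left undefined. */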
1615
1616/**
1617 * @opcode 0x28
1618 * @opgroup og_gen_arith_bin
1619 * @opflclass arithmetic
1620 */
1621FNIEMOP_DEF(iemOp_sub_Eb_Gb)
1622{
1623 IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
1624 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1625 IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(bRm); /* Special case: sub samereg, samereg - zeros samereg and sets EFLAGS to known value */
1626 IEMOP_BODY_BINARY_rm_r8_RW(bRm, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1627}
1628
1629
1630/**
1631 * @opcode 0x29
1632 * @opgroup og_gen_arith_bin
1633 * @opflclass arithmetic
1634 */
1635FNIEMOP_DEF(iemOp_sub_Ev_Gv)
1636{
1637 IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
1638 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1639 IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(bRm); /* Special case: sub samereg, samereg - zeros samereg and sets EFLAGS to known value */
1640 IEMOP_BODY_BINARY_rm_rv_RW( bRm, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1641 IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, sub);
1642}
1643
1644
1645/**
1646 * @opcode 0x2a
1647 * @opgroup og_gen_arith_bin
1648 * @opflclass arithmetic
1649 */
1650FNIEMOP_DEF(iemOp_sub_Gb_Eb)
1651{
1652 IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1653 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1654 IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(bRm); /* Special case: sub samereg, samereg - zeros samereg and sets EFLAGS to known value */
1655 IEMOP_BODY_BINARY_r8_rm(bRm, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1656}
1657
1658
1659/**
1660 * @opcode 0x2b
1661 * @opgroup og_gen_arith_bin
1662 * @opflclass arithmetic
1663 */
1664FNIEMOP_DEF(iemOp_sub_Gv_Ev)
1665{
1666 IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
1667 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1668 IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(bRm); /* Special case: sub samereg, samereg - zeros samereg and sets EFLAGS to known value */
1669 IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 0, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1670}
1671
1672
1673/**
1674 * @opcode 0x2c
1675 * @opgroup og_gen_arith_bin
1676 * @opflclass arithmetic
1677 */
1678FNIEMOP_DEF(iemOp_sub_Al_Ib)
1679{
1680 IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1681 IEMOP_BODY_BINARY_AL_Ib(sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1682}
1683
1684
1685/**
1686 * @opcode 0x2d
1687 * @opgroup og_gen_arith_bin
1688 * @opflclass arithmetic
1689 */
1690FNIEMOP_DEF(iemOp_sub_eAX_Iz)
1691{
1692 IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
1693 IEMOP_BODY_BINARY_rAX_Iz_RW(sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1694}
1695
1696
1697/**
1698 * @opcode 0x2e
1699 * @opmnemonic SEG
1700 * @op1 CS
1701 * @opgroup og_prefix
1702 * @openc prefix
1703 * @opdisenum OP_SEG
1704 * @ophints harmless
1705 */
1706FNIEMOP_DEF(iemOp_seg_CS)
1707{
1708 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
1709 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
1710 pVCpu->iem.s.iEffSeg = X86_SREG_CS;
1711
1712 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1713 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1714}
1715
1716
1717/**
1718 * @opcode 0x2f
1719 * @opfltest af,cf
1720 * @opflmodify cf,pf,af,zf,sf,of
1721 * @opflundef of
1722 */
1723FNIEMOP_DEF(iemOp_das)
1724{
1725 IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
1726 IEMOP_HLP_NO_64BIT();
1727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1728 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
1729 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_das);
1730}
1731
1732
1733/**
1734 * @opcode 0x30
1735 * @opgroup og_gen_arith_bin
1736 * @opflclass logical
1737 */
1738FNIEMOP_DEF(iemOp_xor_Eb_Gb)
1739{
1740 IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
1741 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1742 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1743 IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(bRm); /* Special case: xor samereg, samereg - zeros samereg and sets EFLAGS to known value */
1744 IEMOP_BODY_BINARY_rm_r8_RW(bRm, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1745}
1746
1747
1748/**
1749 * @opcode 0x31
1750 * @opgroup og_gen_arith_bin
1751 * @opflclass logical
1752 */
1753FNIEMOP_DEF(iemOp_xor_Ev_Gv)
1754{
1755 IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
1756 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1757 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1758 IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(bRm); /* Special case: xor samereg, samereg - zeros samereg and sets EFLAGS to known value */
1759 IEMOP_BODY_BINARY_rm_rv_RW( bRm, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1760 IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, xor);
1761}
1762
1763
1764/**
1765 * @opcode 0x32
1766 * @opgroup og_gen_arith_bin
1767 * @opflclass logical
1768 */
1769FNIEMOP_DEF(iemOp_xor_Gb_Eb)
1770{
1771 IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1772 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1773 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1774 IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(bRm); /* Special case: xor samereg, samereg - zeros samereg and sets EFLAGS to known value */
1775 IEMOP_BODY_BINARY_r8_rm(bRm, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1776}
1777
1778
1779/**
1780 * @opcode 0x33
1781 * @opgroup og_gen_arith_bin
1782 * @opflclass logical
1783 */
1784FNIEMOP_DEF(iemOp_xor_Gv_Ev)
1785{
1786 IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
1787 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1788 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1789 IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(bRm); /* Special case: xor samereg, samereg - zeros samereg and sets EFLAGS to known value */
1790 IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 0, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1791}
1792
1793
1794/**
1795 * @opcode 0x34
1796 * @opgroup og_gen_arith_bin
1797 * @opflclass logical
1798 */
1799FNIEMOP_DEF(iemOp_xor_Al_Ib)
1800{
1801 IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1802 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1803 IEMOP_BODY_BINARY_AL_Ib(xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1804}
1805
1806
1807/**
1808 * @opcode 0x35
1809 * @opgroup og_gen_arith_bin
1810 * @opflclass logical
1811 */
1812FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1813{
1814 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, 0);
1815 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1816 IEMOP_BODY_BINARY_rAX_Iz_RW(xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1817}
1818
1819
1820/**
1821 * @opcode 0x36
1822 * @opmnemonic SEG
1823 * @op1 SS
1824 * @opgroup og_prefix
1825 * @openc prefix
1826 * @opdisenum OP_SEG
1827 * @ophints harmless
1828 */
1829FNIEMOP_DEF(iemOp_seg_SS)
1830{
1831 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
1832 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
1833 pVCpu->iem.s.iEffSeg = X86_SREG_SS;
1834
1835 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1836 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1837}
1838
1839
1840/**
1841 * @opcode 0x37
1842 * @opfltest af
1843 * @opflmodify cf,pf,af,zf,sf,of
1844 * @opflundef pf,zf,sf,of
1845 * @opgroup og_gen_arith_dec
1846 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1847 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1848 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1849 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1850 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1851 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1852 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1853 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1854 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1855 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1856 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1857 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1858 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1859 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1860 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1861 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1862 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1863 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1864 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1865 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1866 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1867 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1868 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1869 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1870 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1871 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1872 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1873 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1874 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1875 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1876 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1877 */
1878FNIEMOP_DEF(iemOp_aaa)
1879{
1880 IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1881 IEMOP_HLP_NO_64BIT();
1882 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1883 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
1884
1885 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aaa);
1886}
1887
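/* Rough sketch of the architectural AAA algorithm implemented by
   iemCImpl_aaa (for reference only; the @optest lines above capture the
   Intel vs AMD differences in the resulting flags):
       if ((AL & 0xf) > 9 || AF) { AX += 0x106; AF = CF = 1; }
       else                      { AF = CF = 0; }
       AL &= 0xf; */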
1888
1889/**
1890 * @opcode 0x38
1891 * @opflclass arithmetic
1892 */
1893FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
1894{
1895 IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
1896 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1897 IEMOP_BODY_BINARY_rm_r8_RO(bRm, iemAImpl_cmp_u8, cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1898}
1899
1900
1901/**
1902 * @opcode 0x39
1903 * @opflclass arithmetic
1904 */
1905FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
1906{
1907 IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
1908 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1909 IEMOP_BODY_BINARY_rm_rv_RO(bRm, cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1910}
1911
1912
1913/**
1914 * @opcode 0x3a
1915 * @opflclass arithmetic
1916 */
1917FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
1918{
1919 IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
1920 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1921 IEMOP_BODY_BINARY_r8_rm_RO(bRm, cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1922}
1923
1924
1925/**
1926 * @opcode 0x3b
1927 * @opflclass arithmetic
1928 */
1929FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
1930{
1931 IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
1932 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1933 IEMOP_BODY_BINARY_rv_rm_RO(bRm, cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1934}
1935
1936
1937/**
1938 * @opcode 0x3c
1939 * @opflclass arithmetic
1940 */
1941FNIEMOP_DEF(iemOp_cmp_Al_Ib)
1942{
1943 IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
1944 IEMOP_BODY_BINARY_AL_Ib(cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1945}
1946
1947
1948/**
1949 * @opcode 0x3d
1950 * @opflclass arithmetic
1951 */
1952FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
1953{
1954 IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
1955 IEMOP_BODY_BINARY_rAX_Iz_RO(cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1956}
1957
1958
1959/**
1960 * @opcode 0x3e
1961 */
1962FNIEMOP_DEF(iemOp_seg_DS)
1963{
1964 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
1965 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
1966 pVCpu->iem.s.iEffSeg = X86_SREG_DS;
1967
1968 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1969 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1970}
1971
1972
1973/**
1974 * @opcode 0x3f
1975 * @opfltest af
1976 * @opflmodify cf,pf,af,zf,sf,of
1977 * @opflundef pf,zf,sf,of
1978 * @opgroup og_gen_arith_dec
1979 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1980 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1981 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1982 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1983 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1984 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1985 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1986 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1987 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1988 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1989 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1990 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1991 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1992 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1993 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1994 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1995 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1996 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1997 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1998 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1999 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
2000 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
2001 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
2002 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
2003 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
2004 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
2005 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
2006 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
2007 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
2008 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
2009 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
2010 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
2011 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
2012 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
2013 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
2014 */
2015FNIEMOP_DEF(iemOp_aas)
2016{
2017 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
2018 IEMOP_HLP_NO_64BIT();
2019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2020 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
2021
2022 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aas);
2023}
2024
2025
2026/**
2027 * Common 'inc/dec register' helper.
2028 *
2029 * Not for 64-bit code, where these opcodes became the REX prefixes.
2030 */
2031#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
2032 switch (pVCpu->iem.s.enmEffOpSize) \
2033 { \
2034 case IEMMODE_16BIT: \
2035 IEM_MC_BEGIN(IEM_MC_F_NOT_64BIT, 0); \
2036 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
2037 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
2038 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
2039 IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
2040 IEM_MC_REF_EFLAGS(pEFlags); \
2041 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
2042 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2043 IEM_MC_END(); \
2044 break; \
2045 \
2046 case IEMMODE_32BIT: \
2047 IEM_MC_BEGIN(IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
2048 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
2049 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
2050 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
2051 IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
2052 IEM_MC_REF_EFLAGS(pEFlags); \
2053 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
2054 IEM_MC_CLEAR_HIGH_GREG_U64(a_iReg); \
2055 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2056 IEM_MC_END(); \
2057 break; \
2058 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2059 } \
2060 (void)0
2061
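/* Note: IEMOP_BODY_UNARY_GReg deliberately has no 64-bit case since the
   0x40..0x4f opcodes are REX prefixes in 64-bit mode; a 64-bit inc/dec has
   to be encoded via group 5 instead, e.g. 'inc rax' is 48 FF C0 rather
   than a bare 40. */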
2062/**
2063 * @opcode 0x40
2064 * @opflclass incdec
2065 */
2066FNIEMOP_DEF(iemOp_inc_eAX)
2067{
2068 /*
2069 * This is a REX prefix in 64-bit mode.
2070 */
2071 if (IEM_IS_64BIT_CODE(pVCpu))
2072 {
2073 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
2074 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;
2075
2076 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2077 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2078 }
2079
2080 IEMOP_MNEMONIC(inc_eAX, "inc eAX");
2081 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
2082}
2083
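/* The 0x40..0x4f decoders below all follow the same pattern: in 64-bit
   mode the low opcode bits map directly to the REX bits (bit 0 = B,
   bit 1 = X, bit 2 = R, bit 3 = W) - e.g. 0x45 is REX.RB and 0x4f is
   REX.RBXW - and decoding continues with the next opcode byte; in all
   other modes they are inc/dec of the register selected by the low three
   bits. */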
2084
2085/**
2086 * @opcode 0x41
2087 * @opflclass incdec
2088 */
2089FNIEMOP_DEF(iemOp_inc_eCX)
2090{
2091 /*
2092 * This is a REX prefix in 64-bit mode.
2093 */
2094 if (IEM_IS_64BIT_CODE(pVCpu))
2095 {
2096 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
2097 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
2098 pVCpu->iem.s.uRexB = 1 << 3;
2099
2100 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2101 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2102 }
2103
2104 IEMOP_MNEMONIC(inc_eCX, "inc eCX");
2105 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
2106}
2107
2108
2109/**
2110 * @opcode 0x42
2111 * @opflclass incdec
2112 */
2113FNIEMOP_DEF(iemOp_inc_eDX)
2114{
2115 /*
2116 * This is a REX prefix in 64-bit mode.
2117 */
2118 if (IEM_IS_64BIT_CODE(pVCpu))
2119 {
2120 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
2121 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
2122 pVCpu->iem.s.uRexIndex = 1 << 3;
2123
2124 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2125 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2126 }
2127
2128 IEMOP_MNEMONIC(inc_eDX, "inc eDX");
2129 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
2130}
2131
2132
2133
2134/**
2135 * @opcode 0x43
2136 * @opflclass incdec
2137 */
2138FNIEMOP_DEF(iemOp_inc_eBX)
2139{
2140 /*
2141 * This is a REX prefix in 64-bit mode.
2142 */
2143 if (IEM_IS_64BIT_CODE(pVCpu))
2144 {
2145 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
2146 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
2147 pVCpu->iem.s.uRexB = 1 << 3;
2148 pVCpu->iem.s.uRexIndex = 1 << 3;
2149
2150 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2151 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2152 }
2153
2154 IEMOP_MNEMONIC(inc_eBX, "inc eBX");
2155 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
2156}
2157
2158
2159/**
2160 * @opcode 0x44
2161 * @opflclass incdec
2162 */
2163FNIEMOP_DEF(iemOp_inc_eSP)
2164{
2165 /*
2166 * This is a REX prefix in 64-bit mode.
2167 */
2168 if (IEM_IS_64BIT_CODE(pVCpu))
2169 {
2170 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
2171 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
2172 pVCpu->iem.s.uRexReg = 1 << 3;
2173
2174 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2175 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2176 }
2177
2178 IEMOP_MNEMONIC(inc_eSP, "inc eSP");
2179 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
2180}
2181
2182
2183/**
2184 * @opcode 0x45
2185 * @opflclass incdec
2186 */
2187FNIEMOP_DEF(iemOp_inc_eBP)
2188{
2189 /*
2190 * This is a REX prefix in 64-bit mode.
2191 */
2192 if (IEM_IS_64BIT_CODE(pVCpu))
2193 {
2194 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
2195 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
2196 pVCpu->iem.s.uRexReg = 1 << 3;
2197 pVCpu->iem.s.uRexB = 1 << 3;
2198
2199 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2200 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2201 }
2202
2203 IEMOP_MNEMONIC(inc_eBP, "inc eBP");
2204 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
2205}
2206
2207
2208/**
2209 * @opcode 0x46
2210 * @opflclass incdec
2211 */
2212FNIEMOP_DEF(iemOp_inc_eSI)
2213{
2214 /*
2215 * This is a REX prefix in 64-bit mode.
2216 */
2217 if (IEM_IS_64BIT_CODE(pVCpu))
2218 {
2219 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
2220 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
2221 pVCpu->iem.s.uRexReg = 1 << 3;
2222 pVCpu->iem.s.uRexIndex = 1 << 3;
2223
2224 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2225 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2226 }
2227
2228 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
2229 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
2230}
2231
2232
2233/**
2234 * @opcode 0x47
2235 * @opflclass incdec
2236 */
2237FNIEMOP_DEF(iemOp_inc_eDI)
2238{
2239 /*
2240 * This is a REX prefix in 64-bit mode.
2241 */
2242 if (IEM_IS_64BIT_CODE(pVCpu))
2243 {
2244 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
2245 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
2246 pVCpu->iem.s.uRexReg = 1 << 3;
2247 pVCpu->iem.s.uRexB = 1 << 3;
2248 pVCpu->iem.s.uRexIndex = 1 << 3;
2249
2250 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2251 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2252 }
2253
2254 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
2255 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
2256}
2257
2258
2259/**
2260 * @opcode 0x48
2261 * @opflclass incdec
2262 */
2263FNIEMOP_DEF(iemOp_dec_eAX)
2264{
2265 /*
2266 * This is a REX prefix in 64-bit mode.
2267 */
2268 if (IEM_IS_64BIT_CODE(pVCpu))
2269 {
2270 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
2271 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
2272 iemRecalEffOpSize(pVCpu);
2273
2274 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2275 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2276 }
2277
2278 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
2279 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
2280}
2281
2282
2283/**
2284 * @opcode 0x49
2285 * @opflclass incdec
2286 */
2287FNIEMOP_DEF(iemOp_dec_eCX)
2288{
2289 /*
2290 * This is a REX prefix in 64-bit mode.
2291 */
2292 if (IEM_IS_64BIT_CODE(pVCpu))
2293 {
2294 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
2295 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2296 pVCpu->iem.s.uRexB = 1 << 3;
2297 iemRecalEffOpSize(pVCpu);
2298
2299 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2300 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2301 }
2302
2303 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
2304 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
2305}
2306
2307
2308/**
2309 * @opcode 0x4a
2310 * @opflclass incdec
2311 */
2312FNIEMOP_DEF(iemOp_dec_eDX)
2313{
2314 /*
2315 * This is a REX prefix in 64-bit mode.
2316 */
2317 if (IEM_IS_64BIT_CODE(pVCpu))
2318 {
2319 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
2320 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2321 pVCpu->iem.s.uRexIndex = 1 << 3;
2322 iemRecalEffOpSize(pVCpu);
2323
2324 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2325 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2326 }
2327
2328 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
2329 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
2330}
2331
2332
2333/**
2334 * @opcode 0x4b
2335 * @opflclass incdec
2336 */
2337FNIEMOP_DEF(iemOp_dec_eBX)
2338{
2339 /*
2340 * This is a REX prefix in 64-bit mode.
2341 */
2342 if (IEM_IS_64BIT_CODE(pVCpu))
2343 {
2344 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
2345 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2346 pVCpu->iem.s.uRexB = 1 << 3;
2347 pVCpu->iem.s.uRexIndex = 1 << 3;
2348 iemRecalEffOpSize(pVCpu);
2349
2350 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2351 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2352 }
2353
2354 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
2355 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
2356}
2357
2358
2359/**
2360 * @opcode 0x4c
2361 * @opflclass incdec
2362 */
2363FNIEMOP_DEF(iemOp_dec_eSP)
2364{
2365 /*
2366 * This is a REX prefix in 64-bit mode.
2367 */
2368 if (IEM_IS_64BIT_CODE(pVCpu))
2369 {
2370 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
2371 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
2372 pVCpu->iem.s.uRexReg = 1 << 3;
2373 iemRecalEffOpSize(pVCpu);
2374
2375 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2376 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2377 }
2378
2379 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
2380 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
2381}
2382
2383
2384/**
2385 * @opcode 0x4d
2386 * @opflclass incdec
2387 */
2388FNIEMOP_DEF(iemOp_dec_eBP)
2389{
2390 /*
2391 * This is a REX prefix in 64-bit mode.
2392 */
2393 if (IEM_IS_64BIT_CODE(pVCpu))
2394 {
2395 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
2396 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2397 pVCpu->iem.s.uRexReg = 1 << 3;
2398 pVCpu->iem.s.uRexB = 1 << 3;
2399 iemRecalEffOpSize(pVCpu);
2400
2401 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2402 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2403 }
2404
2405 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
2406 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
2407}
2408
2409
2410/**
2411 * @opcode 0x4e
2412 * @opflclass incdec
2413 */
2414FNIEMOP_DEF(iemOp_dec_eSI)
2415{
2416 /*
2417 * This is a REX prefix in 64-bit mode.
2418 */
2419 if (IEM_IS_64BIT_CODE(pVCpu))
2420 {
2421 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
2422 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2423 pVCpu->iem.s.uRexReg = 1 << 3;
2424 pVCpu->iem.s.uRexIndex = 1 << 3;
2425 iemRecalEffOpSize(pVCpu);
2426
2427 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2428 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2429 }
2430
2431 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
2432 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
2433}
2434
2435
2436/**
2437 * @opcode 0x4f
2438 * @opflclass incdec
2439 */
2440FNIEMOP_DEF(iemOp_dec_eDI)
2441{
2442 /*
2443 * This is a REX prefix in 64-bit mode.
2444 */
2445 if (IEM_IS_64BIT_CODE(pVCpu))
2446 {
2447 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
2448 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2449 pVCpu->iem.s.uRexReg = 1 << 3;
2450 pVCpu->iem.s.uRexB = 1 << 3;
2451 pVCpu->iem.s.uRexIndex = 1 << 3;
2452 iemRecalEffOpSize(pVCpu);
2453
2454 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2455 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2456 }
2457
2458 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
2459 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
2460}
2461
2462
2463/**
2464 * Common 'push register' helper.
2465 */
2466FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
2467{
2468 if (IEM_IS_64BIT_CODE(pVCpu))
2469 {
2470 iReg |= pVCpu->iem.s.uRexB;
2471 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2472 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2473 }
2474
2475 switch (pVCpu->iem.s.enmEffOpSize)
2476 {
2477 case IEMMODE_16BIT:
2478 IEM_MC_BEGIN(0, 0);
2479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2480 IEM_MC_LOCAL(uint16_t, u16Value);
2481 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
2482 IEM_MC_PUSH_U16(u16Value);
2483 IEM_MC_ADVANCE_RIP_AND_FINISH();
2484 IEM_MC_END();
2485 break;
2486
2487 case IEMMODE_32BIT:
2488 IEM_MC_BEGIN(IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2490 IEM_MC_LOCAL(uint32_t, u32Value);
2491 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
2492 IEM_MC_PUSH_U32(u32Value);
2493 IEM_MC_ADVANCE_RIP_AND_FINISH();
2494 IEM_MC_END();
2495 break;
2496
2497 case IEMMODE_64BIT:
2498 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2500 IEM_MC_LOCAL(uint64_t, u64Value);
2501 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
2502 IEM_MC_PUSH_U64(u64Value);
2503 IEM_MC_ADVANCE_RIP_AND_FINISH();
2504 IEM_MC_END();
2505 break;
2506
2507 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2508 }
2509}
2510
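/* Note on the 64-bit branch above: in long mode PUSH defaults to a 64-bit
   operand and cannot be overridden down to 32 bits - only the 0x66 prefix
   has an effect, giving a 16-bit push - hence the IEMMODE_64BIT /
   IEMMODE_16BIT selection and the IEM_MC_F_NOT_64BIT on the 32-bit case. */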
2511
2512/**
2513 * @opcode 0x50
2514 */
2515FNIEMOP_DEF(iemOp_push_eAX)
2516{
2517 IEMOP_MNEMONIC(push_rAX, "push rAX");
2518 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
2519}
2520
2521
2522/**
2523 * @opcode 0x51
2524 */
2525FNIEMOP_DEF(iemOp_push_eCX)
2526{
2527 IEMOP_MNEMONIC(push_rCX, "push rCX");
2528 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
2529}
2530
2531
2532/**
2533 * @opcode 0x52
2534 */
2535FNIEMOP_DEF(iemOp_push_eDX)
2536{
2537 IEMOP_MNEMONIC(push_rDX, "push rDX");
2538 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
2539}
2540
2541
2542/**
2543 * @opcode 0x53
2544 */
2545FNIEMOP_DEF(iemOp_push_eBX)
2546{
2547 IEMOP_MNEMONIC(push_rBX, "push rBX");
2548 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
2549}
2550
2551
2552/**
2553 * @opcode 0x54
2554 */
2555FNIEMOP_DEF(iemOp_push_eSP)
2556{
2557 IEMOP_MNEMONIC(push_rSP, "push rSP");
2558 if (IEM_GET_TARGET_CPU(pVCpu) != IEMTARGETCPU_8086)
2559 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
2560
2561 /* 8086 works differently wrt 'push sp' compared to 80186 and later. */
2562 IEM_MC_BEGIN(IEM_MC_F_ONLY_8086, 0);
2563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2564 IEM_MC_LOCAL(uint16_t, u16Value);
2565 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
2566 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
2567 IEM_MC_PUSH_U16(u16Value);
2568 IEM_MC_ADVANCE_RIP_AND_FINISH();
2569 IEM_MC_END();
2570}
2571
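/* For reference: on the 8086 'push sp' stores the value SP has *after* the
   decrement (hence the IEM_MC_SUB_LOCAL_U16 above), while the 80186 and
   later store the pre-decrement value; e.g. with SP=0x0100 the 8086 writes
   0x00FE at SS:0x00FE where later CPUs write 0x0100. */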
2572
2573/**
2574 * @opcode 0x55
2575 */
2576FNIEMOP_DEF(iemOp_push_eBP)
2577{
2578 IEMOP_MNEMONIC(push_rBP, "push rBP");
2579 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
2580}
2581
2582
2583/**
2584 * @opcode 0x56
2585 */
2586FNIEMOP_DEF(iemOp_push_eSI)
2587{
2588 IEMOP_MNEMONIC(push_rSI, "push rSI");
2589 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
2590}
2591
2592
2593/**
2594 * @opcode 0x57
2595 */
2596FNIEMOP_DEF(iemOp_push_eDI)
2597{
2598 IEMOP_MNEMONIC(push_rDI, "push rDI");
2599 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
2600}
2601
2602
2603/**
2604 * Common 'pop register' helper.
2605 */
2606FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
2607{
2608 if (IEM_IS_64BIT_CODE(pVCpu))
2609 {
2610 iReg |= pVCpu->iem.s.uRexB;
2611 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2612 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2613 }
2614
2615 switch (pVCpu->iem.s.enmEffOpSize)
2616 {
2617 case IEMMODE_16BIT:
2618 IEM_MC_BEGIN(0, 0);
2619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2620 IEM_MC_POP_GREG_U16(iReg);
2621 IEM_MC_ADVANCE_RIP_AND_FINISH();
2622 IEM_MC_END();
2623 break;
2624
2625 case IEMMODE_32BIT:
2626 IEM_MC_BEGIN(IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2628 IEM_MC_POP_GREG_U32(iReg);
2629 IEM_MC_ADVANCE_RIP_AND_FINISH();
2630 IEM_MC_END();
2631 break;
2632
2633 case IEMMODE_64BIT:
2634 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2636 IEM_MC_POP_GREG_U64(iReg);
2637 IEM_MC_ADVANCE_RIP_AND_FINISH();
2638 IEM_MC_END();
2639 break;
2640
2641 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2642 }
2643}
2644
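/* Note: no special casing is needed for 'pop xSP' here - architecturally
   the value is read from the old top of stack and the post-pop increment
   of xSP is then overwritten by the loaded value - and that is presumably
   taken care of inside the IEM_MC_POP_GREG_* microcode ops. */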
2645
2646/**
2647 * @opcode 0x58
2648 */
2649FNIEMOP_DEF(iemOp_pop_eAX)
2650{
2651 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
2652 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
2653}
2654
2655
2656/**
2657 * @opcode 0x59
2658 */
2659FNIEMOP_DEF(iemOp_pop_eCX)
2660{
2661 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
2662 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
2663}
2664
2665
2666/**
2667 * @opcode 0x5a
2668 */
2669FNIEMOP_DEF(iemOp_pop_eDX)
2670{
2671 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
2672 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
2673}
2674
2675
2676/**
2677 * @opcode 0x5b
2678 */
2679FNIEMOP_DEF(iemOp_pop_eBX)
2680{
2681 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
2682 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
2683}
2684
2685
2686/**
2687 * @opcode 0x5c
2688 */
2689FNIEMOP_DEF(iemOp_pop_eSP)
2690{
2691 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
2692 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
2693}
2694
2695
2696/**
2697 * @opcode 0x5d
2698 */
2699FNIEMOP_DEF(iemOp_pop_eBP)
2700{
2701 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
2702 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
2703}
2704
2705
2706/**
2707 * @opcode 0x5e
2708 */
2709FNIEMOP_DEF(iemOp_pop_eSI)
2710{
2711 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
2712 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
2713}
2714
2715
2716/**
2717 * @opcode 0x5f
2718 */
2719FNIEMOP_DEF(iemOp_pop_eDI)
2720{
2721 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
2722 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
2723}
2724
2725
2726/**
2727 * @opcode 0x60
2728 */
2729FNIEMOP_DEF(iemOp_pusha)
2730{
2731 IEMOP_MNEMONIC(pusha, "pusha");
2732 IEMOP_HLP_MIN_186();
2733 IEMOP_HLP_NO_64BIT();
2734 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2735 IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_16);
2736 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2737 IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_32);
2738}
2739
2740
2741/**
2742 * @opcode 0x61
2743 */
2744FNIEMOP_DEF(iemOp_popa__mvex)
2745{
2746 if (!IEM_IS_64BIT_CODE(pVCpu))
2747 {
2748 IEMOP_MNEMONIC(popa, "popa");
2749 IEMOP_HLP_MIN_186();
2750 IEMOP_HLP_NO_64BIT();
2751 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2752 IEM_MC_DEFER_TO_CIMPL_0_RET(0,
2753 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
2754 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
2755 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
2756 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
2757 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
2758 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
2759 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
2760 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
2761 iemCImpl_popa_16);
2762 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2763 IEM_MC_DEFER_TO_CIMPL_0_RET(0,
2764 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
2765 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
2766 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
2767 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
2768 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
2769 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
2770 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
2771 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
2772 iemCImpl_popa_32);
2773 }
2774 IEMOP_MNEMONIC(mvex, "mvex");
2775 Log(("mvex prefix is not supported!\n"));
2776 IEMOP_RAISE_INVALID_OPCODE_RET();
2777}
2778
2779
2780/**
2781 * @opcode 0x62
2782 * @opmnemonic bound
2783 * @op1 Gv_RO
2784 * @op2 Ma
2785 * @opmincpu 80186
2786 * @ophints harmless x86_invalid_64
2787 * @optest op1=0 op2=0 ->
2788 * @optest op1=1 op2=0 -> value.xcpt=5
2789 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2790 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2791 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2792 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2793 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2794 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2795 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2796 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2797 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2798 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2799 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2800 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2801 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2802 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2803 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2804 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2805 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2806 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2807 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2808 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2809 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2810 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2811 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2812 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2813 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2814 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2815 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2816 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2817 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2818 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2819 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2820 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2821 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2822 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2823 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2824 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2825 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2826 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2827 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2828 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2829 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2830 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2831 */
2832FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
2833{
2834 /* The BOUND instruction is invalid in 64-bit mode. In legacy and
2835 compatibility mode it is invalid with MOD=3.
2836
2837 In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
2838 both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
2839 given as R and X without an exact description, so we assume it builds on
2840 the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
2841 like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
2842 uint8_t bRm;
2843 if (!IEM_IS_64BIT_CODE(pVCpu))
2844 {
2845 IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2846 IEMOP_HLP_MIN_186();
2847 IEM_OPCODE_GET_NEXT_U8(&bRm);
2848 if (IEM_IS_MODRM_MEM_MODE(bRm))
2849 {
2850 /** @todo testcase: check that there are two memory accesses involved. Check
2851 * whether they're both read before the \#BR triggers. */
2852 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2853 {
2854 IEM_MC_BEGIN(IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
2855 IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
2856 IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
2857 IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
2858 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2859
2860 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2862
2863 IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
2864 IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2865 IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
2866
2867 IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
2868 IEM_MC_END();
2869 }
2870 else /* 32-bit operands */
2871 {
2872 IEM_MC_BEGIN(IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2873 IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
2874 IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
2875 IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
2876 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2877
2878 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2879 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2880
2881 IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
2882 IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2883 IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
2884
2885 IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
2886 IEM_MC_END();
2887 }
2888 }
2889
2890 /*
2891 * @opdone
2892 */
2893 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
2894 {
2895 /* Note that there is no need for the CPU to fetch further bytes
2896 here because MODRM.MOD == 3. */
2897 Log(("evex not supported by the guest CPU!\n"));
2898 IEMOP_RAISE_INVALID_OPCODE_RET();
2899 }
2900 }
2901 else
2902 {
2903 /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
2904 * does modr/m read, whereas AMD probably doesn't... */
2905 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
2906 {
2907 Log(("evex not supported by the guest CPU!\n"));
2908 return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
2909 }
2910 IEM_OPCODE_GET_NEXT_U8(&bRm);
2911 }
2912
2913 IEMOP_MNEMONIC(evex, "evex");
2914 uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
2915 uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
2916 Log(("evex prefix is not implemented!\n"));
2917 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2918}
2919
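/* For reference: a full EVEX prefix is four bytes - 0x62 followed by three
   payload bytes - which is why the stub above fetches two further bytes
   (bP2, bP3) after the byte that doubled as ModR/M for BOUND before
   bailing out with VERR_IEM_INSTR_NOT_IMPLEMENTED. */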
2920
2921/**
2922 * @opcode 0x63
2923 * @opflmodify zf
2924 * @note non-64-bit modes.
2925 */
2926FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
2927{
2928 IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
2929 IEMOP_HLP_MIN_286();
2930 IEMOP_HLP_NO_REAL_OR_V86_MODE();
2931 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2932
2933 if (IEM_IS_MODRM_REG_MODE(bRm))
2934 {
2935 /* Register */
2936 IEM_MC_BEGIN(IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
2937 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
2938 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2939 IEM_MC_ARG(uint16_t, u16Src, 1);
2940 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2941
2942 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
2943 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
2944 IEM_MC_REF_EFLAGS(pEFlags);
2945 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
2946
2947 IEM_MC_ADVANCE_RIP_AND_FINISH();
2948 IEM_MC_END();
2949 }
2950 else
2951 {
2952 /* Memory */
2953 IEM_MC_BEGIN(IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
2954 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2955 IEM_MC_ARG(uint16_t, u16Src, 1);
2956 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2957 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
2958
2959 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2960 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
2961 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2962 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
2963 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
2964 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
2965
2966 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
2967 IEM_MC_COMMIT_EFLAGS(EFlags);
2968 IEM_MC_ADVANCE_RIP_AND_FINISH();
2969 IEM_MC_END();
2970 }
2971}
2972
2973
2974/**
2975 * @opcode 0x63
2976 *
2977 * @note This is a weird one. It works like a regular move instruction if
2978 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2979 * @todo This definitely needs a testcase to verify the odd cases. */
2980FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
2981{
2982 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */
2983
2984 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
2985 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2986
2987 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2988 {
2989 if (IEM_IS_MODRM_REG_MODE(bRm))
2990 {
2991 /*
2992 * Register to register.
2993 */
2994 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2996 IEM_MC_LOCAL(uint64_t, u64Value);
2997 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
2998 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2999 IEM_MC_ADVANCE_RIP_AND_FINISH();
3000 IEM_MC_END();
3001 }
3002 else
3003 {
3004 /*
3005 * We're loading a register from memory.
3006 */
3007 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3008 IEM_MC_LOCAL(uint64_t, u64Value);
3009 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3010 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3012 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3013 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
3014 IEM_MC_ADVANCE_RIP_AND_FINISH();
3015 IEM_MC_END();
3016 }
3017 }
3018 else
3019 AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
3020}
3021
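/* For reference: with REX.W opcode 0x63 is MOVSXD proper, e.g. 48 63 C0 =
   'movsxd rax, eax', sign-extending the low 32 bits of the source into the
   64-bit destination; the odd non-REX.W forms that AMD documents as plain
   moves are the ones left to the AssertFailedReturn above. */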
3022
3023/**
3024 * @opcode 0x64
3025 * @opmnemonic segfs
3026 * @opmincpu 80386
3027 * @opgroup og_prefixes
3028 */
3029FNIEMOP_DEF(iemOp_seg_FS)
3030{
3031 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
3032 IEMOP_HLP_MIN_386();
3033
3034 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
3035 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
3036
3037 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
3038 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
3039}
3040
3041
3042/**
3043 * @opcode 0x65
3044 * @opmnemonic seggs
3045 * @opmincpu 80386
3046 * @opgroup og_prefixes
3047 */
3048FNIEMOP_DEF(iemOp_seg_GS)
3049{
3050 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
3051 IEMOP_HLP_MIN_386();
3052
3053 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
3054 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
3055
3056 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
3057 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
3058}
3059
3060
3061/**
3062 * @opcode 0x66
3063 * @opmnemonic opsize
3064 * @openc prefix
3065 * @opmincpu 80386
3066 * @ophints harmless
3067 * @opgroup og_prefixes
3068 */
3069FNIEMOP_DEF(iemOp_op_size)
3070{
3071 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
3072 IEMOP_HLP_MIN_386();
3073
3074 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
3075 iemRecalEffOpSize(pVCpu);
3076
3077 /* For the 4-entry opcode tables, the operand prefix doesn't count
3078 when REPZ or REPNZ are present. */
3079 if (pVCpu->iem.s.idxPrefix == 0)
3080 pVCpu->iem.s.idxPrefix = 1;
3081
3082 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
3083 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
3084}
3085
3086
3087/**
3088 * @opcode 0x67
3089 * @opmnemonic addrsize
3090 * @openc prefix
3091 * @opmincpu 80386
3092 * @ophints harmless
3093 * @opgroup og_prefixes
3094 */
3095FNIEMOP_DEF(iemOp_addr_size)
3096{
3097 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
3098 IEMOP_HLP_MIN_386();
3099
3100 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
3101 switch (pVCpu->iem.s.enmDefAddrMode)
3102 {
3103 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
3104 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
3105 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
3106 default: AssertFailed();
3107 }
3108
3109 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
3110 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
3111}
3112
3113
3114/**
3115 * @opcode 0x68
3116 */
3117FNIEMOP_DEF(iemOp_push_Iz)
3118{
3119 IEMOP_MNEMONIC(push_Iz, "push Iz");
3120 IEMOP_HLP_MIN_186();
3121 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3122 switch (pVCpu->iem.s.enmEffOpSize)
3123 {
3124 case IEMMODE_16BIT:
3125 IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0);
3126 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3128 IEM_MC_LOCAL_CONST(uint16_t, u16Value, u16Imm);
3129 IEM_MC_PUSH_U16(u16Value);
3130 IEM_MC_ADVANCE_RIP_AND_FINISH();
3131 IEM_MC_END();
3132 break;
3133
3134 case IEMMODE_32BIT:
3135 IEM_MC_BEGIN(IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
3136 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3138 IEM_MC_LOCAL_CONST(uint32_t, u32Value, u32Imm);
3139 IEM_MC_PUSH_U32(u32Value);
3140 IEM_MC_ADVANCE_RIP_AND_FINISH();
3141 IEM_MC_END();
3142 break;
3143
3144 case IEMMODE_64BIT:
3145 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3146 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3148 IEM_MC_LOCAL_CONST(uint64_t, u64Value, u64Imm);
3149 IEM_MC_PUSH_U64(u64Value);
3150 IEM_MC_ADVANCE_RIP_AND_FINISH();
3151 IEM_MC_END();
3152 break;
3153
3154 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3155 }
3156}
3157
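/* Note: Iz denotes a 16- or 32-bit immediate depending on the effective
   operand size; in 64-bit mode the 32-bit immediate is sign-extended to 64
   bits before the push (IEM_OPCODE_GET_NEXT_S32_SX_U64 above), so e.g.
   68 FF FF FF FF pushes 0xffffffffffffffff. */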
3158
3159/**
3160 * @opcode 0x69
3161 * @opflclass multiply
3162 */
3163FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
3164{
3165 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
3166 IEMOP_HLP_MIN_186();
3167 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3168 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3169
3170 switch (pVCpu->iem.s.enmEffOpSize)
3171 {
3172 case IEMMODE_16BIT:
3173 {
3174 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
3175 if (IEM_IS_MODRM_REG_MODE(bRm))
3176 {
3177 /* register operand */
3178 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3179 IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0);
3180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3181 IEM_MC_LOCAL(uint16_t, u16Tmp);
3182 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3183 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3184 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
3185 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3186 IEM_MC_REF_EFLAGS(pEFlags);
3187 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3188 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3189
3190 IEM_MC_ADVANCE_RIP_AND_FINISH();
3191 IEM_MC_END();
3192 }
3193 else
3194 {
3195 /* memory operand */
3196 IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0);
3197 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3198 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
3199
3200 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3202
3203 IEM_MC_LOCAL(uint16_t, u16Tmp);
3204 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3205
3206 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3207 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
3208 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3209 IEM_MC_REF_EFLAGS(pEFlags);
3210 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3211 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3212
3213 IEM_MC_ADVANCE_RIP_AND_FINISH();
3214 IEM_MC_END();
3215 }
3216 break;
3217 }
3218
3219 case IEMMODE_32BIT:
3220 {
3221 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
3222 if (IEM_IS_MODRM_REG_MODE(bRm))
3223 {
3224 /* register operand */
3225 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3226 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
3227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3228 IEM_MC_LOCAL(uint32_t, u32Tmp);
3229 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3230
3231 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3232 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
3233 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3234 IEM_MC_REF_EFLAGS(pEFlags);
3235 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3236 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3237
3238 IEM_MC_ADVANCE_RIP_AND_FINISH();
3239 IEM_MC_END();
3240 }
3241 else
3242 {
3243 /* memory operand */
3244 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
3245 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3247
3248 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3250
3251 IEM_MC_LOCAL(uint32_t, u32Tmp);
3252 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3253
3254 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3255 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
3256 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3257 IEM_MC_REF_EFLAGS(pEFlags);
3258 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3259 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3260
3261 IEM_MC_ADVANCE_RIP_AND_FINISH();
3262 IEM_MC_END();
3263 }
3264 break;
3265 }
3266
3267 case IEMMODE_64BIT:
3268 {
3269 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3270 if (IEM_IS_MODRM_REG_MODE(bRm))
3271 {
3272 /* register operand */
3273 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3274 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3276 IEM_MC_LOCAL(uint64_t, u64Tmp);
3277 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3278
3279 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3280 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
3281 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3282 IEM_MC_REF_EFLAGS(pEFlags);
3283 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3284 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3285
3286 IEM_MC_ADVANCE_RIP_AND_FINISH();
3287 IEM_MC_END();
3288 }
3289 else
3290 {
3291 /* memory operand */
3292 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3293 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3294 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3295
3296 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
3297 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* parameter count for the threaded function for this block. */
3298
3299 IEM_MC_LOCAL(uint64_t, u64Tmp);
3300 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3301
3302 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3303 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int32_t)u32Imm, 1);
3304 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3305 IEM_MC_REF_EFLAGS(pEFlags);
3306 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3307 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3308
3309 IEM_MC_ADVANCE_RIP_AND_FINISH();
3310 IEM_MC_END();
3311 }
3312 break;
3313 }
3314
3315 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3316 }
3317}
3318
3319
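/* Worked example for the imul Gv,Ev,Iz tail above (illustrative bytes, not
   from this file): 69 c8 10 27 00 00 decodes as imul ecx, eax, 10000 (0x2710).
   In 64-bit mode only a 32-bit immediate exists; it is sign-extended to 64
   bits, which is why the memory form fetches a U32 and widens it with the
   (int64_t)(int32_t) cast above. */
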
3320/**
3321 * @opcode 0x6a
3322 */
3323FNIEMOP_DEF(iemOp_push_Ib)
3324{
3325 IEMOP_MNEMONIC(push_Ib, "push Ib");
3326 IEMOP_HLP_MIN_186();
3327 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3328 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3329
3330 switch (pVCpu->iem.s.enmEffOpSize)
3331 {
3332 case IEMMODE_16BIT:
3333 IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0);
3334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3335 IEM_MC_LOCAL_CONST(uint16_t, uValue, (int16_t)i8Imm);
3336 IEM_MC_PUSH_U16(uValue);
3337 IEM_MC_ADVANCE_RIP_AND_FINISH();
3338 IEM_MC_END();
3339 break;
3340 case IEMMODE_32BIT:
3341 IEM_MC_BEGIN(IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
3342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3343 IEM_MC_LOCAL_CONST(uint32_t, uValue, (int32_t)i8Imm);
3344 IEM_MC_PUSH_U32(uValue);
3345 IEM_MC_ADVANCE_RIP_AND_FINISH();
3346 IEM_MC_END();
3347 break;
3348 case IEMMODE_64BIT:
3349 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3351 IEM_MC_LOCAL_CONST(uint64_t, uValue, (int64_t)i8Imm);
3352 IEM_MC_PUSH_U64(uValue);
3353 IEM_MC_ADVANCE_RIP_AND_FINISH();
3354 IEM_MC_END();
3355 break;
3356 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3357 }
3358}
3359
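/* Note: the 0x6a immediate is a signed byte that is widened to the current
   operand size before the push, as the (int16_t)/(int32_t)/(int64_t) casts
   above show; e.g. the illustrative encoding 6a ff pushes 0xffff, 0xffffffff
   or 0xffffffffffffffff depending on the effective operand size. */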
3360
3361/**
3362 * @opcode 0x6b
3363 * @opflclass multiply
3364 */
3365FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
3366{
3367 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended byte); */
3368 IEMOP_HLP_MIN_186();
3369 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3370 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3371
3372 switch (pVCpu->iem.s.enmEffOpSize)
3373 {
3374 case IEMMODE_16BIT:
3375 {
3376 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
3377 if (IEM_IS_MODRM_REG_MODE(bRm))
3378 {
3379 /* register operand */
3380 IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0);
3381 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3383
3384 IEM_MC_LOCAL(uint16_t, u16Tmp);
3385 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3386
3387 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3388 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
3389 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3390 IEM_MC_REF_EFLAGS(pEFlags);
3391 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3392 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3393
3394 IEM_MC_ADVANCE_RIP_AND_FINISH();
3395 IEM_MC_END();
3396 }
3397 else
3398 {
3399 /* memory operand */
3400 IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0);
3401
3402 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3403 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3404
3405 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
3406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3407
3408 IEM_MC_LOCAL(uint16_t, u16Tmp);
3409 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3410
3411 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3412 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
3413 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3414 IEM_MC_REF_EFLAGS(pEFlags);
3415 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3416 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3417
3418 IEM_MC_ADVANCE_RIP_AND_FINISH();
3419 IEM_MC_END();
3420 }
3421 break;
3422 }
3423
3424 case IEMMODE_32BIT:
3425 {
3426 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
3427 if (IEM_IS_MODRM_REG_MODE(bRm))
3428 {
3429 /* register operand */
3430 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3431 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
3432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3433 IEM_MC_LOCAL(uint32_t, u32Tmp);
3434 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3435
3436 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3437 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
3438 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3439 IEM_MC_REF_EFLAGS(pEFlags);
3440 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3441 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3442
3443 IEM_MC_ADVANCE_RIP_AND_FINISH();
3444 IEM_MC_END();
3445 }
3446 else
3447 {
3448 /* memory operand */
3449 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
3450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3451 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3452
3453 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
3454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3455
3456 IEM_MC_LOCAL(uint32_t, u32Tmp);
3457 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3458
3459 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3460 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
3461 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3462 IEM_MC_REF_EFLAGS(pEFlags);
3463 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3464 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3465
3466 IEM_MC_ADVANCE_RIP_AND_FINISH();
3467 IEM_MC_END();
3468 }
3469 break;
3470 }
3471
3472 case IEMMODE_64BIT:
3473 {
3474 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3475 if (IEM_IS_MODRM_REG_MODE(bRm))
3476 {
3477 /* register operand */
3478 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3479 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3481 IEM_MC_LOCAL(uint64_t, u64Tmp);
3482 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3483
3484 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3485 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
3486 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3487 IEM_MC_REF_EFLAGS(pEFlags);
3488 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3489 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3490
3491 IEM_MC_ADVANCE_RIP_AND_FINISH();
3492 IEM_MC_END();
3493 }
3494 else
3495 {
3496 /* memory operand */
3497 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3498 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3499 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3500
3501 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the threaded parameter count. */
3502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3503
3504 IEM_MC_LOCAL(uint64_t, u64Tmp);
3505 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3506
3507 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3508 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
3509 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3510 IEM_MC_REF_EFLAGS(pEFlags);
3511 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3512 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3513
3514 IEM_MC_ADVANCE_RIP_AND_FINISH();
3515 IEM_MC_END();
3516 }
3517 break;
3518 }
3519
3520 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3521 }
3522}
3523
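/* Worked example for the 0x6b form above (illustrative bytes, not from this
   file): 6b c8 05 decodes as imul ecx, eax, 5, and since the byte immediate
   is sign-extended, 6b c8 fb is imul ecx, eax, -5. */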
3524
3525/**
3526 * @opcode 0x6c
3527 * @opfltest iopl,df
3528 */
3529FNIEMOP_DEF(iemOp_insb_Yb_DX)
3530{
3531 IEMOP_HLP_MIN_186();
3532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3533 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3534 {
3535 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
3536 switch (pVCpu->iem.s.enmEffAddrMode)
3537 {
3538 case IEMMODE_16BIT:
3539 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3540 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3541 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3542 iemCImpl_rep_ins_op8_addr16, false);
3543 case IEMMODE_32BIT:
3544 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3545 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3546 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3547 iemCImpl_rep_ins_op8_addr32, false);
3548 case IEMMODE_64BIT:
3549 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3550 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3551 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3552 iemCImpl_rep_ins_op8_addr64, false);
3553 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3554 }
3555 }
3556 else
3557 {
3558 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
3559 switch (pVCpu->iem.s.enmEffAddrMode)
3560 {
3561 case IEMMODE_16BIT:
3562 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3563 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3564 iemCImpl_ins_op8_addr16, false);
3565 case IEMMODE_32BIT:
3566 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3567 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3568 iemCImpl_ins_op8_addr32, false);
3569 case IEMMODE_64BIT:
3570 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3571 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3572 iemCImpl_ins_op8_addr64, false);
3573 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3574 }
3575 }
3576}
3577
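/* Note: INS always stores to ES:[xDI]; the destination segment cannot be
   overridden, so no effective-segment argument is passed to the workers.
   The RT_BIT_64(kIemNativeGstReg_GprFirst + ...) masks above presumably tell
   the recompiler which guest registers the C implementation may dirty: xDI
   for all forms, plus xCX for the REP forms. */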
3578
3579/**
3580 * @opcode 0x6d
3581 * @opfltest iopl,df
3582 */
3583FNIEMOP_DEF(iemOp_inswd_Yv_DX)
3584{
3585 IEMOP_HLP_MIN_186();
3586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3587 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3588 {
3589 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
3590 switch (pVCpu->iem.s.enmEffOpSize)
3591 {
3592 case IEMMODE_16BIT:
3593 switch (pVCpu->iem.s.enmEffAddrMode)
3594 {
3595 case IEMMODE_16BIT:
3596 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3597 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3598 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3599 iemCImpl_rep_ins_op16_addr16, false);
3600 case IEMMODE_32BIT:
3601 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3602 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3603 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3604 iemCImpl_rep_ins_op16_addr32, false);
3605 case IEMMODE_64BIT:
3606 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3607 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3608 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3609 iemCImpl_rep_ins_op16_addr64, false);
3610 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3611 }
3612 break;
3613 case IEMMODE_64BIT:
3614 case IEMMODE_32BIT:
3615 switch (pVCpu->iem.s.enmEffAddrMode)
3616 {
3617 case IEMMODE_16BIT:
3618 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3619 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3620 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3621 iemCImpl_rep_ins_op32_addr16, false);
3622 case IEMMODE_32BIT:
3623 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3624 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3625 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3626 iemCImpl_rep_ins_op32_addr32, false);
3627 case IEMMODE_64BIT:
3628 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3629 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3630 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3631 iemCImpl_rep_ins_op32_addr64, false);
3632 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3633 }
3634 break;
3635 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3636 }
3637 }
3638 else
3639 {
3640 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
3641 switch (pVCpu->iem.s.enmEffOpSize)
3642 {
3643 case IEMMODE_16BIT:
3644 switch (pVCpu->iem.s.enmEffAddrMode)
3645 {
3646 case IEMMODE_16BIT:
3647 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3648 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3649 iemCImpl_ins_op16_addr16, false);
3650 case IEMMODE_32BIT:
3651 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3652 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3653 iemCImpl_ins_op16_addr32, false);
3654 case IEMMODE_64BIT:
3655 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3656 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3657 iemCImpl_ins_op16_addr64, false);
3658 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3659 }
3660 break;
3661 case IEMMODE_64BIT:
3662 case IEMMODE_32BIT:
3663 switch (pVCpu->iem.s.enmEffAddrMode)
3664 {
3665 case IEMMODE_16BIT:
3666 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3667 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3668 iemCImpl_ins_op32_addr16, false);
3669 case IEMMODE_32BIT:
3670 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3671 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3672 iemCImpl_ins_op32_addr32, false);
3673 case IEMMODE_64BIT:
3674 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3675 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3676 iemCImpl_ins_op32_addr64, false);
3677 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3678 }
3679 break;
3680 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3681 }
3682 }
3683}
3684
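/* Note: the IEMMODE_64BIT case labels above deliberately share the op32
   implementations; there is no 64-bit I/O string operand size, so REX.W
   cannot select a quadword INS. */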
3685
3686/**
3687 * @opcode 0x6e
3688 * @opfltest iopl,df
3689 */
3690FNIEMOP_DEF(iemOp_outsb_Yb_DX)
3691{
3692 IEMOP_HLP_MIN_186();
3693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3694 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3695 {
3696 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
3697 switch (pVCpu->iem.s.enmEffAddrMode)
3698 {
3699 case IEMMODE_16BIT:
3700 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3701 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3702 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3703 iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3704 case IEMMODE_32BIT:
3705 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3706 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3707 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3708 iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3709 case IEMMODE_64BIT:
3710 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3711 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3712 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3713 iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3714 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3715 }
3716 }
3717 else
3718 {
3719 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
3720 switch (pVCpu->iem.s.enmEffAddrMode)
3721 {
3722 case IEMMODE_16BIT:
3723 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3724 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3725 iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3726 case IEMMODE_32BIT:
3727 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3728 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3729 iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3730 case IEMMODE_64BIT:
3731 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3732 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3733 iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3734 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3735 }
3736 }
3737}
3738
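/* Note on OUTS: unlike INS, the memory operand is a source read from
   DS:[xSI] and honours segment override prefixes, hence
   pVCpu->iem.s.iEffSeg being passed as an extra argument to the
   iemCImpl_(rep_)outs_* workers above. */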
3739
3740/**
3741 * @opcode 0x6f
3742 * @opfltest iopl,df
3743 */
3744FNIEMOP_DEF(iemOp_outswd_Yv_DX)
3745{
3746 IEMOP_HLP_MIN_186();
3747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3748 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3749 {
3750 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
3751 switch (pVCpu->iem.s.enmEffOpSize)
3752 {
3753 case IEMMODE_16BIT:
3754 switch (pVCpu->iem.s.enmEffAddrMode)
3755 {
3756 case IEMMODE_16BIT:
3757 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3758 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3759 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3760 iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3761 case IEMMODE_32BIT:
3762 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3763 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3764 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3765 iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3766 case IEMMODE_64BIT:
3767 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3768 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3769 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3770 iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3771 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3772 }
3773 break;
3774 case IEMMODE_64BIT:
3775 case IEMMODE_32BIT:
3776 switch (pVCpu->iem.s.enmEffAddrMode)
3777 {
3778 case IEMMODE_16BIT:
3779 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3780 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3781 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3782 iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3783 case IEMMODE_32BIT:
3784 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3785 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3786 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3787 iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3788 case IEMMODE_64BIT:
3789 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3790 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3791 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3792 iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3793 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3794 }
3795 break;
3796 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3797 }
3798 }
3799 else
3800 {
3801 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
3802 switch (pVCpu->iem.s.enmEffOpSize)
3803 {
3804 case IEMMODE_16BIT:
3805 switch (pVCpu->iem.s.enmEffAddrMode)
3806 {
3807 case IEMMODE_16BIT:
3808 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3809 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3810 iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3811 case IEMMODE_32BIT:
3812 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3813 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3814 iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3815 case IEMMODE_64BIT:
3816 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3817 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3818 iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3819 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3820 }
3821 break;
3822 case IEMMODE_64BIT:
3823 case IEMMODE_32BIT:
3824 switch (pVCpu->iem.s.enmEffAddrMode)
3825 {
3826 case IEMMODE_16BIT:
3827 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3828 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3829 iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3830 case IEMMODE_32BIT:
3831 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3832 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3833 iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3834 case IEMMODE_64BIT:
3835 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3836 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3837 iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3838 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3839 }
3840 break;
3841 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3842 }
3843 }
3844}
3845
3846
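/* The 0x70..0x7f short conditional jumps below all follow one pattern: fetch
   the signed 8-bit displacement, then either take a RIP-relative jump or just
   advance RIP, depending on the flag test. The negated forms (jno, jnc, jne,
   ...) reuse the positive flag test and simply swap the taken/fall-through
   arms. Illustrative encoding: 70 05 is jo +5. */
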
3847/**
3848 * @opcode 0x70
3849 * @opfltest of
3850 */
3851FNIEMOP_DEF(iemOp_jo_Jb)
3852{
3853 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
3854 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3855 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3856
3857 IEM_MC_BEGIN(0, 0);
3858 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3859 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3860 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3861 } IEM_MC_ELSE() {
3862 IEM_MC_ADVANCE_RIP_AND_FINISH();
3863 } IEM_MC_ENDIF();
3864 IEM_MC_END();
3865}
3866
3867
3868/**
3869 * @opcode 0x71
3870 * @opfltest of
3871 */
3872FNIEMOP_DEF(iemOp_jno_Jb)
3873{
3874 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
3875 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3876 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3877
3878 IEM_MC_BEGIN(0, 0);
3879 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3880 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3881 IEM_MC_ADVANCE_RIP_AND_FINISH();
3882 } IEM_MC_ELSE() {
3883 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3884 } IEM_MC_ENDIF();
3885 IEM_MC_END();
3886}
3887
3888/**
3889 * @opcode 0x72
3890 * @opfltest cf
3891 */
3892FNIEMOP_DEF(iemOp_jc_Jb)
3893{
3894 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
3895 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3896 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3897
3898 IEM_MC_BEGIN(0, 0);
3899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3900 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3901 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3902 } IEM_MC_ELSE() {
3903 IEM_MC_ADVANCE_RIP_AND_FINISH();
3904 } IEM_MC_ENDIF();
3905 IEM_MC_END();
3906}
3907
3908
3909/**
3910 * @opcode 0x73
3911 * @opfltest cf
3912 */
3913FNIEMOP_DEF(iemOp_jnc_Jb)
3914{
3915 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
3916 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3917 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3918
3919 IEM_MC_BEGIN(0, 0);
3920 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3921 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3922 IEM_MC_ADVANCE_RIP_AND_FINISH();
3923 } IEM_MC_ELSE() {
3924 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3925 } IEM_MC_ENDIF();
3926 IEM_MC_END();
3927}
3928
3929
3930/**
3931 * @opcode 0x74
3932 * @opfltest zf
3933 */
3934FNIEMOP_DEF(iemOp_je_Jb)
3935{
3936 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
3937 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3938 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3939
3940 IEM_MC_BEGIN(0, 0);
3941 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3942 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3943 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3944 } IEM_MC_ELSE() {
3945 IEM_MC_ADVANCE_RIP_AND_FINISH();
3946 } IEM_MC_ENDIF();
3947 IEM_MC_END();
3948}
3949
3950
3951/**
3952 * @opcode 0x75
3953 * @opfltest zf
3954 */
3955FNIEMOP_DEF(iemOp_jne_Jb)
3956{
3957 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
3958 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3959 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3960
3961 IEM_MC_BEGIN(0, 0);
3962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3963 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3964 IEM_MC_ADVANCE_RIP_AND_FINISH();
3965 } IEM_MC_ELSE() {
3966 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3967 } IEM_MC_ENDIF();
3968 IEM_MC_END();
3969}
3970
3971
3972/**
3973 * @opcode 0x76
3974 * @opfltest cf,zf
3975 */
3976FNIEMOP_DEF(iemOp_jbe_Jb)
3977{
3978 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
3979 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3980 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3981
3982 IEM_MC_BEGIN(0, 0);
3983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3984 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3985 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3986 } IEM_MC_ELSE() {
3987 IEM_MC_ADVANCE_RIP_AND_FINISH();
3988 } IEM_MC_ENDIF();
3989 IEM_MC_END();
3990}
3991
3992
3993/**
3994 * @opcode 0x77
3995 * @opfltest cf,zf
3996 */
3997FNIEMOP_DEF(iemOp_jnbe_Jb)
3998{
3999 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
4000 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
4001 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
4002
4003 IEM_MC_BEGIN(0, 0);
4004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4005 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4006 IEM_MC_ADVANCE_RIP_AND_FINISH();
4007 } IEM_MC_ELSE() {
4008 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
4009 } IEM_MC_ENDIF();
4010 IEM_MC_END();
4011}
4012
4013
4014/**
4015 * @opcode 0x78
4016 * @opfltest sf
4017 */
4018FNIEMOP_DEF(iemOp_js_Jb)
4019{
4020 IEMOP_MNEMONIC(js_Jb, "js Jb");
4021 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
4022 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
4023
4024 IEM_MC_BEGIN(0, 0);
4025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4026 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4027 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
4028 } IEM_MC_ELSE() {
4029 IEM_MC_ADVANCE_RIP_AND_FINISH();
4030 } IEM_MC_ENDIF();
4031 IEM_MC_END();
4032}
4033
4034
4035/**
4036 * @opcode 0x79
4037 * @opfltest sf
4038 */
4039FNIEMOP_DEF(iemOp_jns_Jb)
4040{
4041 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
4042 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
4043 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
4044
4045 IEM_MC_BEGIN(0, 0);
4046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4047 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4048 IEM_MC_ADVANCE_RIP_AND_FINISH();
4049 } IEM_MC_ELSE() {
4050 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
4051 } IEM_MC_ENDIF();
4052 IEM_MC_END();
4053}
4054
4055
4056/**
4057 * @opcode 0x7a
4058 * @opfltest pf
4059 */
4060FNIEMOP_DEF(iemOp_jp_Jb)
4061{
4062 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
4063 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
4064 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
4065
4066 IEM_MC_BEGIN(0, 0);
4067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4068 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4069 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
4070 } IEM_MC_ELSE() {
4071 IEM_MC_ADVANCE_RIP_AND_FINISH();
4072 } IEM_MC_ENDIF();
4073 IEM_MC_END();
4074}
4075
4076
4077/**
4078 * @opcode 0x7b
4079 * @opfltest pf
4080 */
4081FNIEMOP_DEF(iemOp_jnp_Jb)
4082{
4083 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
4084 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
4085 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
4086
4087 IEM_MC_BEGIN(0, 0);
4088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4089 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4090 IEM_MC_ADVANCE_RIP_AND_FINISH();
4091 } IEM_MC_ELSE() {
4092 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
4093 } IEM_MC_ENDIF();
4094 IEM_MC_END();
4095}
4096
4097
4098/**
4099 * @opcode 0x7c
4100 * @opfltest sf,of
4101 */
4102FNIEMOP_DEF(iemOp_jl_Jb)
4103{
4104 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
4105 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
4106 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
4107
4108 IEM_MC_BEGIN(0, 0);
4109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4110 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4111 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
4112 } IEM_MC_ELSE() {
4113 IEM_MC_ADVANCE_RIP_AND_FINISH();
4114 } IEM_MC_ENDIF();
4115 IEM_MC_END();
4116}
4117
4118
4119/**
4120 * @opcode 0x7d
4121 * @opfltest sf,of
4122 */
4123FNIEMOP_DEF(iemOp_jnl_Jb)
4124{
4125 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
4126 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
4127 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
4128
4129 IEM_MC_BEGIN(0, 0);
4130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4131 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4132 IEM_MC_ADVANCE_RIP_AND_FINISH();
4133 } IEM_MC_ELSE() {
4134 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
4135 } IEM_MC_ENDIF();
4136 IEM_MC_END();
4137}
4138
4139
4140/**
4141 * @opcode 0x7e
4142 * @opfltest zf,sf,of
4143 */
4144FNIEMOP_DEF(iemOp_jle_Jb)
4145{
4146 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
4147 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
4148 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
4149
4150 IEM_MC_BEGIN(0, 0);
4151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4152 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4153 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
4154 } IEM_MC_ELSE() {
4155 IEM_MC_ADVANCE_RIP_AND_FINISH();
4156 } IEM_MC_ENDIF();
4157 IEM_MC_END();
4158}
4159
4160
4161/**
4162 * @opcode 0x7f
4163 * @opfltest zf,sf,of
4164 */
4165FNIEMOP_DEF(iemOp_jnle_Jb)
4166{
4167 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
4168 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
4169 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
4170
4171 IEM_MC_BEGIN(0, 0);
4172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4173 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4174 IEM_MC_ADVANCE_RIP_AND_FINISH();
4175 } IEM_MC_ELSE() {
4176 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
4177 } IEM_MC_ENDIF();
4178 IEM_MC_END();
4179}
4180
4181
4182/**
4183 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4184 * iemOp_Grp1_Eb_Ib_80.
4185 */
4186#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_InsNm, a_fRegNativeArchs, a_fMemNativeArchs) \
4187 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4188 { \
4189 /* register target */ \
4190 IEM_MC_BEGIN(0, 0); \
4191 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4193 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
4194 IEM_MC_LOCAL(uint8_t, u8Dst); \
4195 IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4196 IEM_MC_LOCAL_EFLAGS( uEFlags); \
4197 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u8Dst, u8Imm, uEFlags, 8, 8); \
4198 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Dst); \
4199 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
4200 } IEM_MC_NATIVE_ELSE() { \
4201 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
4202 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
4203 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4204 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4205 IEM_MC_REF_EFLAGS(pEFlags); \
4206 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
4207 } IEM_MC_NATIVE_ENDIF(); \
4208 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4209 IEM_MC_END(); \
4210 } \
4211 else \
4212 { \
4213 /* memory target */ \
4214 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4215 { \
4216 IEM_MC_BEGIN(0, 0); \
4217 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4218 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4219 \
4220 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4221 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
4222 IEMOP_HLP_DONE_DECODING(); \
4223 \
4224 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4225 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
4226 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4227 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4228 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
4229 \
4230 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4231 IEM_MC_COMMIT_EFLAGS(EFlags); \
4232 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4233 IEM_MC_END(); \
4234 } \
4235 else \
4236 { \
4237 IEM_MC_BEGIN(0, 0); \
4238 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
4239 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4240 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4241 \
4242 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4243 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4244 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
4245 IEMOP_HLP_DONE_DECODING(); \
4246 \
4247 IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4248 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4249 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8_locked), pu8Dst, u8Src, pEFlags); \
4250 \
4251 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4252 IEM_MC_COMMIT_EFLAGS(EFlags); \
4253 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4254 IEM_MC_END(); \
4255 } \
4256 } \
4257 (void)0
4258
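/* Worked example for the read-write body above (illustrative bytes, not from
   this file): 80 c3 01 decodes via iemOp_Grp1_add_Eb_Ib as add bl, 1. A
   locked memory form such as f0 80 03 01 (lock add byte [ebx], 1) takes the
   IEM_MC_MEM_MAP_U8_ATOMIC path and calls the _u8_locked worker instead. */
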
4259#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_InsNm, a_fNativeArchs) \
4260 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4261 { \
4262 /* register target */ \
4263 IEM_MC_BEGIN(0, 0); \
4264 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4265 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4266 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
4267 IEM_MC_LOCAL(uint8_t, u8Dst); \
4268 IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4269 IEM_MC_LOCAL_EFLAGS(uEFlags); \
4270 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u8Dst, u8Imm, uEFlags, 8, 8); \
4271 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
4272 } IEM_MC_NATIVE_ELSE() { \
4273 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
4274 IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
4275 IEM_MC_REF_GREG_U8_CONST(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4276 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4277 IEM_MC_REF_EFLAGS(pEFlags); \
4278 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
4279 } IEM_MC_NATIVE_ENDIF(); \
4280 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4281 IEM_MC_END(); \
4282 } \
4283 else \
4284 { \
4285 /* memory target */ \
4286 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4287 { \
4288 IEM_MC_BEGIN(0, 0); \
4289 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4290 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4291 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4292 IEMOP_HLP_DONE_DECODING(); \
4293 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
4294 IEM_MC_LOCAL(uint8_t, u8Dst); \
4295 IEM_MC_FETCH_MEM_U8(u8Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4296 IEM_MC_LOCAL_EFLAGS(uEFlags); \
4297 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u8Dst, u8Imm, uEFlags, 8, 8); \
4298 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
4299 } IEM_MC_NATIVE_ELSE() { \
4300 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4301 IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
4302 IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4303 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4304 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
4305 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
4306 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
4307 IEM_MC_COMMIT_EFLAGS(EFlags); \
4308 } IEM_MC_NATIVE_ENDIF(); \
4309 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4310 IEM_MC_END(); \
4311 } \
4312 else \
4313 { \
4314 IEMOP_HLP_DONE_DECODING(); \
4315 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
4316 } \
4317 } \
4318 (void)0
4319
4320
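/* Note: the read-only variant above has no locked memory path; a LOCK prefix
   on an instruction that never writes its destination (CMP) is invalid,
   which is what the IEMOP_RAISE_INVALID_LOCK_PREFIX_RET branch enforces. */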
4321
4322/**
4323 * @opmaps grp1_80,grp1_83
4324 * @opcode /0
4325 * @opflclass arithmetic
4326 */
4327FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
4328{
4329 IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
4330 IEMOP_BODY_BINARY_Eb_Ib_RW(add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
4331}
4332
4333
4334/**
4335 * @opmaps grp1_80,grp1_83
4336 * @opcode /1
4337 * @opflclass logical
4338 */
4339FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
4340{
4341 IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
4342 IEMOP_BODY_BINARY_Eb_Ib_RW(or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
4343}
4344
4345
4346/**
4347 * @opmaps grp1_80,grp1_83
4348 * @opcode /2
4349 * @opflclass arithmetic_carry
4350 */
4351FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
4352{
4353 IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
4354 IEMOP_BODY_BINARY_Eb_Ib_RW(adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
4355}
4356
4357
4358/**
4359 * @opmaps grp1_80,grp1_83
4360 * @opcode /3
4361 * @opflclass arithmetic_carry
4362 */
4363FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
4364{
4365 IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
4366 IEMOP_BODY_BINARY_Eb_Ib_RW(sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
4367}
4368
4369
4370/**
4371 * @opmaps grp1_80,grp1_83
4372 * @opcode /4
4373 * @opflclass logical
4374 */
4375FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
4376{
4377 IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
4378 IEMOP_BODY_BINARY_Eb_Ib_RW(and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
4379}
4380
4381
4382/**
4383 * @opmaps grp1_80,grp1_83
4384 * @opcode /5
4385 * @opflclass arithmetic
4386 */
4387FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
4388{
4389 IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
4390 IEMOP_BODY_BINARY_Eb_Ib_RW(sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
4391}
4392
4393
4394/**
4395 * @opmaps grp1_80,grp1_83
4396 * @opcode /6
4397 * @opflclass logical
4398 */
4399FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
4400{
4401 IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
4402 IEMOP_BODY_BINARY_Eb_Ib_RW(xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
4403}
4404
4405
4406/**
4407 * @opmaps grp1_80,grp1_83
4408 * @opcode /7
4409 * @opflclass arithmetic
4410 */
4411FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
4412{
4413 IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
4414 IEMOP_BODY_BINARY_Eb_Ib_RO(cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
4415}
4416
4417
4418/**
4419 * @opcode 0x80
4420 */
4421FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
4422{
4423 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4424 switch (IEM_GET_MODRM_REG_8(bRm))
4425 {
4426 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
4427 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib, bRm);
4428 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
4429 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
4430 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
4431 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
4432 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
4433 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
4434 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4435 }
4436}
4437
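/* Dispatch example for 0x80 (illustrative bytes): in 80 f1 20 the ModR/M reg
   field is 6, so decoding lands in iemOp_Grp1_xor_Eb_Ib above, i.e.
   xor cl, 020h. */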
4438
4439/**
4440 * Body for a group 1 binary operator with Ev,Iz operands (read-write destination), dispatched via iemOp_Grp1_Ev_Iz.
4441 */
4442#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_InsNm, a_fRegNativeArchs, a_fMemNativeArchs) \
4443 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4444 { \
4445 /* register target */ \
4446 switch (pVCpu->iem.s.enmEffOpSize) \
4447 { \
4448 case IEMMODE_16BIT: \
4449 { \
4450 IEM_MC_BEGIN(0, 0); \
4451 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4453 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
4454 IEM_MC_LOCAL(uint16_t, u16Dst); \
4455 IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4456 IEM_MC_LOCAL(uint32_t, uEFlags); \
4457 IEM_MC_FETCH_EFLAGS(uEFlags); \
4458 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
4459 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Dst); \
4460 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
4461 } IEM_MC_NATIVE_ELSE() { \
4462 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4463 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4464 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4465 IEM_MC_REF_EFLAGS(pEFlags); \
4466 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4467 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
4468 } IEM_MC_NATIVE_ENDIF(); \
4469 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4470 IEM_MC_END(); \
4471 break; \
4472 } \
4473 \
4474 case IEMMODE_32BIT: \
4475 { \
4476 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
4477 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4479 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
4480 IEM_MC_LOCAL(uint32_t, u32Dst); \
4481 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4482 IEM_MC_LOCAL(uint32_t, uEFlags); \
4483 IEM_MC_FETCH_EFLAGS(uEFlags); \
4484 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
4485 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst); \
4486 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
4487 } IEM_MC_NATIVE_ELSE() { \
4488 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4489 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4490 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4491 IEM_MC_REF_EFLAGS(pEFlags); \
4492 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4493 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
4494 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4495 } IEM_MC_NATIVE_ENDIF(); \
4496 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4497 IEM_MC_END(); \
4498 break; \
4499 } \
4500 \
4501 case IEMMODE_64BIT: \
4502 { \
4503 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
4504 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4506 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
4507 IEM_MC_LOCAL(uint64_t, u64Dst); \
4508 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4509 IEM_MC_LOCAL(uint32_t, uEFlags); \
4510 IEM_MC_FETCH_EFLAGS(uEFlags); \
4511 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
4512 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst); \
4513 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
4514 } IEM_MC_NATIVE_ELSE() { \
4515 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4516 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4517 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4518 IEM_MC_REF_EFLAGS(pEFlags); \
4519 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4520 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
4521 } IEM_MC_NATIVE_ENDIF(); \
4522 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4523 IEM_MC_END(); \
4524 break; \
4525 } \
4526 \
4527 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4528 } \
4529 } \
4530 else \
4531 { \
4532 /* memory target */ \
4533 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4534 { \
4535 switch (pVCpu->iem.s.enmEffOpSize) \
4536 { \
4537 case IEMMODE_16BIT: \
4538 { \
4539 IEM_MC_BEGIN(0, 0); \
4540 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4541 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4542 \
4543 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4544 IEMOP_HLP_DONE_DECODING(); \
4545 \
4546 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4547 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4548 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4549 \
4550 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4551 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4552 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
4553 \
4554 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4555 IEM_MC_COMMIT_EFLAGS(EFlags); \
4556 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4557 IEM_MC_END(); \
4558 break; \
4559 } \
4560 \
4561 case IEMMODE_32BIT: \
4562 { \
4563 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
4564 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4565 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4566 \
4567 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4568 IEMOP_HLP_DONE_DECODING(); \
4569 \
4570 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4571 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4572 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4573 \
4574 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4575 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4576 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
4577 \
4578 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4579 IEM_MC_COMMIT_EFLAGS(EFlags); \
4580 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4581 IEM_MC_END(); \
4582 break; \
4583 } \
4584 \
4585 case IEMMODE_64BIT: \
4586 { \
4587 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
4588 \
4589 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4590 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4591 \
4592 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4593 IEMOP_HLP_DONE_DECODING(); \
4594 \
4595 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4596 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4597 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4598 \
4599 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4600 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4601 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
4602 \
4603 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4604 IEM_MC_COMMIT_EFLAGS(EFlags); \
4605 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4606 IEM_MC_END(); \
4607 break; \
4608 } \
4609 \
4610 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4611 } \
4612 } \
4613 else \
4614 { \
4615 switch (pVCpu->iem.s.enmEffOpSize) \
4616 { \
4617 case IEMMODE_16BIT: \
4618 { \
4619 IEM_MC_BEGIN(0, 0); \
4620 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4621 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4622 \
4623 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4624 IEMOP_HLP_DONE_DECODING(); \
4625 \
4626 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4627 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4628 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4629 \
4630 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4631 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4632 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16_locked), pu16Dst, u16Src, pEFlags); \
4633 \
4634 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4635 IEM_MC_COMMIT_EFLAGS(EFlags); \
4636 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4637 IEM_MC_END(); \
4638 break; \
4639 } \
4640 \
4641 case IEMMODE_32BIT: \
4642 { \
4643 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
4644 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4646 \
4647 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4648 IEMOP_HLP_DONE_DECODING(); \
4649 \
4650 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4651 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4652 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4653 \
4654 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4655 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4656 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32_locked), pu32Dst, u32Src, pEFlags); \
4657 \
4658 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4659 IEM_MC_COMMIT_EFLAGS(EFlags); \
4660 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4661 IEM_MC_END(); \
4662 break; \
4663 } \
4664 \
4665 case IEMMODE_64BIT: \
4666 { \
4667 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
4668 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4669 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4670 \
4671 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4672 IEMOP_HLP_DONE_DECODING(); \
4673 \
4674 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4675 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4676 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4677 \
4678 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4679 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4680 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64_locked), pu64Dst, u64Src, pEFlags); \
4681 \
4682 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4683 IEM_MC_COMMIT_EFLAGS(EFlags); \
4684 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4685 IEM_MC_END(); \
4686 break; \
4687 } \
4688 \
4689 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4690 } \
4691 } \
4692 } \
4693 (void)0
4694
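/* Worked example for the Ev,Iz read-write body above (illustrative bytes,
   not from this file): 81 c3 44 33 22 11 decodes as add ebx, 0x11223344.
   Note that even in 64-bit mode the immediate stays 32 bits and is
   sign-extended (IEM_OPCODE_GET_NEXT_S32_SX_U64); group 1 has no 64-bit
   immediate form. */
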
4695/* Read-only version of the body above, used by CMP which never writes its destination. */
4696#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_InsNm, a_fNativeArchs) \
4697 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4698 { \
4699 /* register target */ \
4700 switch (pVCpu->iem.s.enmEffOpSize) \
4701 { \
4702 case IEMMODE_16BIT: \
4703 { \
4704 IEM_MC_BEGIN(0, 0); \
4705 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4707 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
4708 IEM_MC_LOCAL(uint16_t, u16Dst); \
4709 IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4710 IEM_MC_LOCAL(uint32_t, uEFlags); \
4711 IEM_MC_FETCH_EFLAGS(uEFlags); \
4712 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
4713 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
4714 } IEM_MC_NATIVE_ELSE() { \
4715 IEM_MC_ARG(uint16_t const *,pu16Dst, 0); \
4716 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4717 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4718 IEM_MC_REF_EFLAGS(pEFlags); \
4719 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4720 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
4721 } IEM_MC_NATIVE_ENDIF(); \
4722 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4723 IEM_MC_END(); \
4724 break; \
4725 } \
4726 \
4727 case IEMMODE_32BIT: \
4728 { \
4729 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
4730 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4732 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
4733 IEM_MC_LOCAL(uint32_t, u32Dst); \
4734 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4735 IEM_MC_LOCAL(uint32_t, uEFlags); \
4736 IEM_MC_FETCH_EFLAGS(uEFlags); \
4737 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
4738 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
4739 } IEM_MC_NATIVE_ELSE() { \
4740 IEM_MC_ARG(uint32_t const *,pu32Dst, 0); \
4741 IEM_MC_REF_GREG_U32_CONST (pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4742 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4743 IEM_MC_REF_EFLAGS(pEFlags); \
4744 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4745 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
4746 } IEM_MC_NATIVE_ENDIF(); \
4747 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4748 IEM_MC_END(); \
4749 break; \
4750 } \
4751 \
4752 case IEMMODE_64BIT: \
4753 { \
4754 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
4755 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4757 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
4758 IEM_MC_LOCAL(uint64_t, u64Dst); \
4759 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4760 IEM_MC_LOCAL(uint32_t, uEFlags); \
4761 IEM_MC_FETCH_EFLAGS(uEFlags); \
4762 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
4763 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
4764 } IEM_MC_NATIVE_ELSE() { \
4765 IEM_MC_ARG(uint64_t const *,pu64Dst, 0); \
4766 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4767 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4768 IEM_MC_REF_EFLAGS(pEFlags); \
4769 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4770 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
4771 } IEM_MC_NATIVE_ENDIF(); \
4772 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4773 IEM_MC_END(); \
4774 break; \
4775 } \
4776 \
4777 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4778 } \
4779 } \
4780 else \
4781 { \
4782 /* memory target */ \
4783 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4784 { \
4785 switch (pVCpu->iem.s.enmEffOpSize) \
4786 { \
4787 case IEMMODE_16BIT: \
4788 { \
4789 IEM_MC_BEGIN(0, 0); \
4790 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4791 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4792 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4793 IEMOP_HLP_DONE_DECODING(); \
4794 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
4795 IEM_MC_LOCAL(uint16_t, u16Dst); \
4796 IEM_MC_FETCH_MEM_U16(u16Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4797 IEM_MC_LOCAL_EFLAGS(uEFlags); \
4798 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
4799 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
4800 } IEM_MC_NATIVE_ELSE() { \
4801 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4802 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
4803 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4804 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4805 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4806 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
4807 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
4808 IEM_MC_COMMIT_EFLAGS(EFlags); \
4809 } IEM_MC_NATIVE_ENDIF(); \
4810 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4811 IEM_MC_END(); \
4812 break; \
4813 } \
4814 \
4815 case IEMMODE_32BIT: \
4816 { \
4817 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
4818 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4819 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4820 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4821 IEMOP_HLP_DONE_DECODING(); \
4822 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
4823 IEM_MC_LOCAL(uint32_t, u32Dst); \
4824 IEM_MC_FETCH_MEM_U32(u32Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4825 IEM_MC_LOCAL_EFLAGS(uEFlags); \
4826 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
4827 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
4828 } IEM_MC_NATIVE_ELSE() { \
4829 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4830 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
4831 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4832 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4833 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4834 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
4835 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
4836 IEM_MC_COMMIT_EFLAGS(EFlags); \
4837 } IEM_MC_NATIVE_ENDIF(); \
4838 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4839 IEM_MC_END(); \
4840 break; \
4841 } \
4842 \
4843 case IEMMODE_64BIT: \
4844 { \
4845 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
4846 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4847 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4848 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4849 IEMOP_HLP_DONE_DECODING(); \
4850 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
4851 IEM_MC_LOCAL(uint64_t, u64Dst); \
4852 IEM_MC_FETCH_MEM_U64(u64Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4853 IEM_MC_LOCAL_EFLAGS( uEFlags); \
4854 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
4855 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
4856 } IEM_MC_NATIVE_ELSE() { \
4857 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4858 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
4859 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4860 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4861 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4862 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
4863 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
4864 IEM_MC_COMMIT_EFLAGS(EFlags); \
4865 } IEM_MC_NATIVE_ENDIF(); \
4866 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4867 IEM_MC_END(); \
4868 break; \
4869 } \
4870 \
4871 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4872 } \
4873 } \
4874 else \
4875 { \
4876 IEMOP_HLP_DONE_DECODING(); \
4877 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
4878 } \
4879 } \
4880 (void)0
4881
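/* Note: in the read-only memory paths above the native branch gets away with
   a plain IEM_MC_FETCH_MEM_* instead of mapping the location, since nothing
   is written back; the fallback branch maps the operand read-only and only
   commits EFLAGS. */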
4882
4883/**
4884 * @opmaps grp1_81
4885 * @opcode /0
4886 * @opflclass arithmetic
4887 */
4888FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
4889{
4890 IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
4891 IEMOP_BODY_BINARY_Ev_Iz_RW(add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
4892}
4893
4894
4895/**
4896 * @opmaps grp1_81
4897 * @opcode /1
4898 * @opflclass logical
4899 */
4900FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
4901{
4902 IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
4903 IEMOP_BODY_BINARY_Ev_Iz_RW(or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
4904}
4905
4906
4907/**
4908 * @opmaps grp1_81
4909 * @opcode /2
4910 * @opflclass arithmetic_carry
4911 */
4912FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
4913{
4914 IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
4915 IEMOP_BODY_BINARY_Ev_Iz_RW(adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
4916}
4917
4918
4919/**
4920 * @opmaps grp1_81
4921 * @opcode /3
4922 * @opflclass arithmetic_carry
4923 */
4924FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
4925{
4926 IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
4927 IEMOP_BODY_BINARY_Ev_Iz_RW(sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
4928}
4929
4930
4931/**
4932 * @opmaps grp1_81
4933 * @opcode /4
4934 * @opflclass logical
4935 */
4936FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
4937{
4938 IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
4939 IEMOP_BODY_BINARY_Ev_Iz_RW(and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
4940}
4941
4942
4943/**
4944 * @opmaps grp1_81
4945 * @opcode /5
4946 * @opflclass arithmetic
4947 */
4948FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
4949{
4950 IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
4951 IEMOP_BODY_BINARY_Ev_Iz_RW(sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
4952}
4953
4954
4955/**
4956 * @opmaps grp1_81
4957 * @opcode /6
4958 * @opflclass logical
4959 */
4960FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
4961{
4962 IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
4963 IEMOP_BODY_BINARY_Ev_Iz_RW(xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
4964}
4965
4966
4967/**
4968 * @opmaps grp1_81
4969 * @opcode /7
4970 * @opflclass arithmetic
4971 */
4972FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
4973{
4974 IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
4975 IEMOP_BODY_BINARY_Ev_Iz_RO(cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
4976}
4977
4978
4979/**
4980 * @opcode 0x81
4981 */
4982FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
4983{
4984 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4985 switch (IEM_GET_MODRM_REG_8(bRm))
4986 {
4987 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
4988 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz, bRm);
4989 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
4990 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
4991 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
4992 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
4993 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
4994 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
4995 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4996 }
4997}
4998
4999
5000/**
5001 * @opcode 0x82
5002 * @opmnemonic grp1_82
5003 * @opgroup og_groups
5004 */
5005FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
5006{
5007 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
5008 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
5009}
5010
5011
5012/**
5013 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
5014 * iemOp_Grp1_Ev_Ib.
5015 */
5016#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_InsNm, a_fRegNativeArchs, a_fMemNativeArchs) \
5017 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5018 { \
5019 /* \
5020 * Register target \
5021 */ \
5022 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not sign extending it here saves threaded function param space. */ \
5023 switch (pVCpu->iem.s.enmEffOpSize) \
5024 { \
5025 case IEMMODE_16BIT: \
5026 IEM_MC_BEGIN(0, 0); \
5027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5028 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
5029 IEM_MC_LOCAL(uint16_t, u16Dst); \
5030 IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5031 IEM_MC_LOCAL(uint32_t, uEFlags); \
5032 IEM_MC_FETCH_EFLAGS(uEFlags); \
5033 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, (uint16_t)(int16_t)(int8_t)u8Imm, uEFlags, 16, 8); \
5034 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Dst); \
5035 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
5036 } IEM_MC_NATIVE_ELSE() { \
5037 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
5038 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5039 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
5040 IEM_MC_REF_EFLAGS(pEFlags); \
5041 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
5042 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
5043 } IEM_MC_NATIVE_ENDIF(); \
5044 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5045 IEM_MC_END(); \
5046 break; \
5047 \
5048 case IEMMODE_32BIT: \
5049 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
5050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5051 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
5052 IEM_MC_LOCAL(uint32_t, u32Dst); \
5053 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5054 IEM_MC_LOCAL(uint32_t, uEFlags); \
5055 IEM_MC_FETCH_EFLAGS(uEFlags); \
5056 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, (uint32_t)(int32_t)(int8_t)u8Imm, uEFlags, 32, 8); \
5057 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst); \
5058 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
5059 } IEM_MC_NATIVE_ELSE() { \
5060 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
5061 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5062 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
5063 IEM_MC_REF_EFLAGS(pEFlags); \
5064 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
5065 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
5066 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
5067 } IEM_MC_NATIVE_ENDIF(); \
5068 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5069 IEM_MC_END(); \
5070 break; \
5071 \
5072 case IEMMODE_64BIT: \
5073 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
5074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5075 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
5076 IEM_MC_LOCAL(uint64_t, u64Dst); \
5077 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5078 IEM_MC_LOCAL(uint32_t, uEFlags); \
5079 IEM_MC_FETCH_EFLAGS(uEFlags); \
5080 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, (uint64_t)(int64_t)(int8_t)u8Imm, uEFlags, 64, 8); \
5081 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst); \
5082 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
5083 } IEM_MC_NATIVE_ELSE() { \
5084 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
5085 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5086 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
5087 IEM_MC_REF_EFLAGS(pEFlags); \
5088 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
5089 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
5090 } IEM_MC_NATIVE_ENDIF(); \
5091 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5092 IEM_MC_END(); \
5093 break; \
5094 \
5095 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5096 } \
5097 } \
5098 else \
5099 { \
5100 /* \
5101 * Memory target. \
5102 */ \
5103 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
5104 { \
5105 switch (pVCpu->iem.s.enmEffOpSize) \
5106 { \
5107 case IEMMODE_16BIT: \
5108 IEM_MC_BEGIN(0, 0); \
5109 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5110 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5111 \
5112 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5113 IEMOP_HLP_DONE_DECODING(); \
5114 \
5115 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5116 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
5117 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5118 \
5119 IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
5120 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
5121 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
5122 \
5123 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
5124 IEM_MC_COMMIT_EFLAGS(EFlags); \
5125 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5126 IEM_MC_END(); \
5127 break; \
5128 \
5129 case IEMMODE_32BIT: \
5130 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
5131 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5133 \
5134 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5135 IEMOP_HLP_DONE_DECODING(); \
5136 \
5137 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5138 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
5139 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5140 \
5141 IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
5142 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
5143 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
5144 \
5145 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
5146 IEM_MC_COMMIT_EFLAGS(EFlags); \
5147 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5148 IEM_MC_END(); \
5149 break; \
5150 \
5151 case IEMMODE_64BIT: \
5152 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
5153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5154 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5155 \
5156 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5157 IEMOP_HLP_DONE_DECODING(); \
5158 \
5159 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5160 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
5161 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5162 \
5163 IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
5164 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
5165 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
5166 \
5167 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
5168 IEM_MC_COMMIT_EFLAGS(EFlags); \
5169 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5170 IEM_MC_END(); \
5171 break; \
5172 \
5173 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5174 } \
5175 } \
5176 else \
5177 { \
5178 switch (pVCpu->iem.s.enmEffOpSize) \
5179 { \
5180 case IEMMODE_16BIT: \
5181 IEM_MC_BEGIN(0, 0); \
5182 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5183 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5184 \
5185 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5186 IEMOP_HLP_DONE_DECODING(); \
5187 \
5188 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5189 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
5190 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5191 \
5192 IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
5193 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
5194 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16_locked), pu16Dst, u16Src, pEFlags); \
5195 \
5196 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
5197 IEM_MC_COMMIT_EFLAGS(EFlags); \
5198 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5199 IEM_MC_END(); \
5200 break; \
5201 \
5202 case IEMMODE_32BIT: \
5203 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
5204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5206 \
5207 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5208 IEMOP_HLP_DONE_DECODING(); \
5209 \
5210 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5211 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
5212 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5213 \
5214 IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
5215 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
5216 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32_locked), pu32Dst, u32Src, pEFlags); \
5217 \
5218 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
5219 IEM_MC_COMMIT_EFLAGS(EFlags); \
5220 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5221 IEM_MC_END(); \
5222 break; \
5223 \
5224 case IEMMODE_64BIT: \
5225 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
5226 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5227 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5228 \
5229 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5230 IEMOP_HLP_DONE_DECODING(); \
5231 \
5232 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5233 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
5234 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5235 \
5236 IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
5237 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
5238 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64_locked), pu64Dst, u64Src, pEFlags); \
5239 \
5240 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
5241 IEM_MC_COMMIT_EFLAGS(EFlags); \
5242 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5243 IEM_MC_END(); \
5244 break; \
5245 \
5246 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5247 } \
5248 } \
5249 } \
5250 (void)0
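/* Worked example of the immediate widening above (illustrative): the one byte
   immediate is sign-extended to the effective operand size by the cast chain,
   e.g. for 'sub eax, -100' encoded as 83 /5 ib with ib=0x9c:
       uint8_t  u8Imm  = 0x9c;                               <- -100 as int8_t
       uint32_t u32Src = (uint32_t)(int32_t)(int8_t)u8Imm;   <- 0xffffff9c
   so the byte form covers the full signed range of the wider operand. */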
5251
5252/* read-only variant */
5253#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_InsNm, a_fNativeArchs) \
5254 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5255 { \
5256 /* \
5257 * Register target \
5258 */ \
5259 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not sign extending it here saves threaded function param space. */ \
5260 switch (pVCpu->iem.s.enmEffOpSize) \
5261 { \
5262 case IEMMODE_16BIT: \
5263 IEM_MC_BEGIN(0, 0); \
5264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5265 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
5266 IEM_MC_LOCAL(uint16_t, u16Dst); \
5267 IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5268 IEM_MC_LOCAL(uint32_t, uEFlags); \
5269 IEM_MC_FETCH_EFLAGS(uEFlags); \
5270 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, (uint16_t)(int16_t)(int8_t)u8Imm, uEFlags, 16, 8); \
5271 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
5272 } IEM_MC_NATIVE_ELSE() { \
5273 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
5274 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5275 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
5276 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
5277 IEM_MC_REF_EFLAGS(pEFlags); \
5278 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
5279 } IEM_MC_NATIVE_ENDIF(); \
5280 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5281 IEM_MC_END(); \
5282 break; \
5283 \
5284 case IEMMODE_32BIT: \
5285 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
5286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5287 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
5288 IEM_MC_LOCAL(uint32_t, u32Dst); \
5289 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5290 IEM_MC_LOCAL(uint32_t, uEFlags); \
5291 IEM_MC_FETCH_EFLAGS(uEFlags); \
5292 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, (uint32_t)(int32_t)(int8_t)u8Imm, uEFlags, 32, 8); \
5293 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
5294 } IEM_MC_NATIVE_ELSE() { \
5295 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
5296 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5297 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
5298 IEM_MC_REF_EFLAGS(pEFlags); \
5299 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
5300 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
5301 } IEM_MC_NATIVE_ENDIF(); \
5302 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5303 IEM_MC_END(); \
5304 break; \
5305 \
5306 case IEMMODE_64BIT: \
5307 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
5308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5309 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
5310 IEM_MC_LOCAL(uint64_t, u64Dst); \
5311 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5312 IEM_MC_LOCAL(uint32_t, uEFlags); \
5313 IEM_MC_FETCH_EFLAGS(uEFlags); \
5314 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, (uint64_t)(int64_t)(int8_t)u8Imm, uEFlags, 64, 8); \
5315 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
5316 } IEM_MC_NATIVE_ELSE() { \
5317 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
5318 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5319 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
5320 IEM_MC_REF_EFLAGS(pEFlags); \
5321 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
5322 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
5323 } IEM_MC_NATIVE_ENDIF(); \
5324 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5325 IEM_MC_END(); \
5326 break; \
5327 \
5328 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5329 } \
5330 } \
5331 else \
5332 { \
5333 /* \
5334 * Memory target. \
5335 */ \
5336 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
5337 { \
5338 switch (pVCpu->iem.s.enmEffOpSize) \
5339 { \
5340 case IEMMODE_16BIT: \
5341 IEM_MC_BEGIN(0, 0); \
5342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5344 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5345 IEMOP_HLP_DONE_DECODING(); \
5346 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
5347 IEM_MC_LOCAL(uint16_t, u16Dst); \
5348 IEM_MC_FETCH_MEM_U16(u16Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5349 IEM_MC_LOCAL_EFLAGS( uEFlags); \
5350 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, (uint16_t)(int16_t)(int8_t)u8Imm, uEFlags, 16, 8); \
5351 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
5352 } IEM_MC_NATIVE_ELSE() { \
5353 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5354 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
5355 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5356 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
5357 IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
5358 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
5359 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
5360 IEM_MC_COMMIT_EFLAGS(EFlags); \
5361 } IEM_MC_NATIVE_ENDIF(); \
5362 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5363 IEM_MC_END(); \
5364 break; \
5365 \
5366 case IEMMODE_32BIT: \
5367 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
5368 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5369 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5370 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5371 IEMOP_HLP_DONE_DECODING(); \
5372 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
5373 IEM_MC_LOCAL(uint32_t, u32Dst); \
5374 IEM_MC_FETCH_MEM_U32(u32Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5375 IEM_MC_LOCAL_EFLAGS( uEFlags); \
5376 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, (uint32_t)(int32_t)(int8_t)u8Imm, uEFlags, 32, 8); \
5377 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
5378 } IEM_MC_NATIVE_ELSE() { \
5379 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5380 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
5381 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5382 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
5383 IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
5384 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
5385 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
5386 IEM_MC_COMMIT_EFLAGS(EFlags); \
5387 } IEM_MC_NATIVE_ENDIF(); \
5388 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5389 IEM_MC_END(); \
5390 break; \
5391 \
5392 case IEMMODE_64BIT: \
5393 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
5394 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5395 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5396 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5397 IEMOP_HLP_DONE_DECODING(); \
5398 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
5399 IEM_MC_LOCAL(uint64_t, u64Dst); \
5400 IEM_MC_FETCH_MEM_U64(u64Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5401 IEM_MC_LOCAL_EFLAGS( uEFlags); \
5402 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, (uint64_t)(int64_t)(int8_t)u8Imm, uEFlags, 64, 8); \
5403 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags); \
5404 } IEM_MC_NATIVE_ELSE() { \
5405 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5406 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
5407 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5408 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
5409 IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
5410 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
5411 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
5412 IEM_MC_COMMIT_EFLAGS(EFlags); \
5413 } IEM_MC_NATIVE_ENDIF(); \
5414 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5415 IEM_MC_END(); \
5416 break; \
5417 \
5418 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5419 } \
5420 } \
5421 else \
5422 { \
5423 IEMOP_HLP_DONE_DECODING(); \
5424 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
5425 } \
5426 } \
5427 (void)0
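/* Note: CMP only reads its destination, so the read-only variant maps the
   memory operand with IEM_MC_MEM_MAP_Uxx_RO and rejects a LOCK prefix with
   #UD, whereas the RW variant above accepts LOCK and dispatches to the
   _locked worker using an atomic mapping. */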
5428
5429/**
5430 * @opmaps grp1_83
5431 * @opcode /0
5432 * @opflclass arithmetic
5433 */
5434FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
5435{
5436 IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
5437 IEMOP_BODY_BINARY_Ev_Ib_RW(add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5438}
5439
5440
5441/**
5442 * @opmaps grp1_83
5443 * @opcode /1
5444 * @opflclass logical
5445 */
5446FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
5447{
5448 IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
5449 IEMOP_BODY_BINARY_Ev_Ib_RW(or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5450}
5451
5452
5453/**
5454 * @opmaps grp1_83
5455 * @opcode /2
5456 * @opflclass arithmetic_carry
5457 */
5458FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
5459{
5460 IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
5461 IEMOP_BODY_BINARY_Ev_Ib_RW(adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5462}
5463
5464
5465/**
5466 * @opmaps grp1_83
5467 * @opcode /3
5468 * @opflclass arithmetic_carry
5469 */
5470FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
5471{
5472 IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
5473 IEMOP_BODY_BINARY_Ev_Ib_RW(sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5474}
5475
5476
5477/**
5478 * @opmaps grp1_83
5479 * @opcode /4
5480 * @opflclass logical
5481 */
5482FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
5483{
5484 IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
5485 IEMOP_BODY_BINARY_Ev_Ib_RW(and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5486}
5487
5488
5489/**
5490 * @opmaps grp1_83
5491 * @opcode /5
5492 * @opflclass arithmetic
5493 */
5494FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
5495{
5496 IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
5497 IEMOP_BODY_BINARY_Ev_Ib_RW(sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5498}
5499
5500
5501/**
5502 * @opmaps grp1_83
5503 * @opcode /6
5504 * @opflclass logical
5505 */
5506FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
5507{
5508 IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
5509 IEMOP_BODY_BINARY_Ev_Ib_RW(xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
5510}
5511
5512
5513/**
5514 * @opmaps grp1_83
5515 * @opcode /7
5516 * @opflclass arithmetic
5517 */
5518FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
5519{
5520 IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
5521 IEMOP_BODY_BINARY_Ev_Ib_RO(cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5522}
5523
5524
5525/**
5526 * @opcode 0x83
5527 */
5528FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
5529{
5530 /* Note! The OR, AND, and XOR instructions seem to be present on CPUs prior
5531 to the 386 even though they are absent from the Intel reference manuals
5532 and some 3rd party opcode listings. */
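 /* Encoding example (illustrative): 83 /r ib uses modrm.reg to select the
    operation, e.g. 83 E8 05 has modrm=0xe8 (mod=3, reg=5 -> SUB, rm=0 -> eAX)
    and decodes as 'sub eAX, 5'. */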
5533 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5534 switch (IEM_GET_MODRM_REG_8(bRm))
5535 {
5536 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
5537 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib, bRm);
5538 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
5539 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
5540 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
5541 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
5542 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
5543 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
5544 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5545 }
5546}
5547
5548
5549/**
5550 * @opcode 0x84
5551 * @opflclass logical
5552 */
5553FNIEMOP_DEF(iemOp_test_Eb_Gb)
5554{
5555 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
5556 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5557
5558 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5559
5560 /*
5561 * Deal with the special case of 'test rN, rN', which is frequently used to test a register for zero.
5562 * This block only makes a difference when emitting native code, where we'll save a register fetch.
5563 */
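 /* How the check below works: bRm >> X86_MODRM_REG_SHIFT yields (mod << 3) | reg,
    while the right-hand side is (X86_MOD_REG << 3) | rm, so the comparison holds
    exactly when mod == 3 (register form) and reg == rm. E.g. 'test al, al' has
    modrm=0xc0: 0xc0 >> 3 == 0x18 == (3 << 3) | 0. The uRexReg == uRexB test then
    extends the reg == rm requirement to the REX extension bits. */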
5564 if ( (bRm >> X86_MODRM_REG_SHIFT) == ((bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT))
5565 && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB)
5566 {
5567 IEM_MC_BEGIN(0, 0);
5568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5569 IEM_MC_ARG(uint8_t, u8Src, 1);
5570 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
5571 IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
5572 IEM_MC_LOCAL_EFLAGS(uEFlags);
5573 IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u8Src, u8Src, uEFlags, 8);
5574 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags);
5575 } IEM_MC_NATIVE_ELSE() {
5576 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5577 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* == IEM_GET_MODRM_RM(pVCpu, bRm) */
5578 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5579 IEM_MC_REF_EFLAGS(pEFlags);
5580 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
5581 } IEM_MC_NATIVE_ENDIF();
5582 IEM_MC_ADVANCE_RIP_AND_FINISH();
5583 IEM_MC_END();
5584 }
5585
5586 IEMOP_BODY_BINARY_rm_r8_RO(bRm, iemAImpl_test_u8, test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5587}
5588
5589
5590/**
5591 * @opcode 0x85
5592 * @opflclass logical
5593 */
5594FNIEMOP_DEF(iemOp_test_Ev_Gv)
5595{
5596 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
5597 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5598
5599 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5600
5601 /*
5602 * Deal with the special case of 'test rN, rN', which is frequently used to test a register for zero.
5603 * This block only makes a difference when emitting native code, where we'll save a register fetch.
5604 */
5605 if ( (bRm >> X86_MODRM_REG_SHIFT) == ((bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT))
5606 && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB)
5607 {
5608 switch (pVCpu->iem.s.enmEffOpSize)
5609 {
5610 case IEMMODE_16BIT:
5611 IEM_MC_BEGIN(0, 0);
5612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5613 IEM_MC_ARG(uint16_t, u16Src, 1);
5614 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
5615 IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
5616 IEM_MC_LOCAL_EFLAGS(uEFlags);
5617 IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u16Src, u16Src, uEFlags, 16);
5618 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags);
5619 } IEM_MC_NATIVE_ELSE() {
5620 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5621 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* == IEM_GET_MODRM_RM(pVCpu, bRm) */
5622 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5623 IEM_MC_REF_EFLAGS(pEFlags);
5624 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
5625 } IEM_MC_NATIVE_ENDIF();
5626 IEM_MC_ADVANCE_RIP_AND_FINISH();
5627 IEM_MC_END();
5628 break;
5629
5630 case IEMMODE_32BIT:
5631 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
5632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5633 IEM_MC_ARG(uint32_t, u32Src, 1);
5634 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
5635 IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
5636 IEM_MC_LOCAL_EFLAGS(uEFlags);
5637 IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u32Src, u32Src, uEFlags, 32);
5638 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags);
5639 } IEM_MC_NATIVE_ELSE() {
5640 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5641 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* == IEM_GET_MODRM_RM(pVCpu, bRm) */
5642 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5643 IEM_MC_REF_EFLAGS(pEFlags);
5644 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
5645 } IEM_MC_NATIVE_ENDIF();
5646 IEM_MC_ADVANCE_RIP_AND_FINISH();
5647 IEM_MC_END();
5648 break;
5649
5650 case IEMMODE_64BIT:
5651 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
5652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5653 IEM_MC_ARG(uint64_t, u64Src, 1);
5654 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
5655 IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
5656 IEM_MC_LOCAL_EFLAGS(uEFlags);
5657 IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u64Src, u64Src, uEFlags, 64);
5658 IEM_MC_COMMIT_EFLAGS_OPT(uEFlags);
5659 } IEM_MC_NATIVE_ELSE() {
5660 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5661 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* == IEM_GET_MODRM_RM(pVCpu, bRm) */
5662 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5663 IEM_MC_REF_EFLAGS(pEFlags);
5664 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
5665 } IEM_MC_NATIVE_ENDIF();
5666 IEM_MC_ADVANCE_RIP_AND_FINISH();
5667 IEM_MC_END();
5668 break;
5669
5670 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5671 }
5672 }
5673
5674 IEMOP_BODY_BINARY_rm_rv_RO(bRm, test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5675}
5676
5677
5678/**
5679 * @opcode 0x86
5680 */
5681FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
5682{
5683 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5684 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
5685
5686 /*
5687 * If rm is denoting a register, no more instruction bytes.
5688 */
5689 if (IEM_IS_MODRM_REG_MODE(bRm))
5690 {
5691 IEM_MC_BEGIN(0, 0);
5692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5693 IEM_MC_LOCAL(uint8_t, uTmp1);
5694 IEM_MC_LOCAL(uint8_t, uTmp2);
5695
5696 IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5697 IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5698 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5699 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5700
5701 IEM_MC_ADVANCE_RIP_AND_FINISH();
5702 IEM_MC_END();
5703 }
5704 else
5705 {
5706 /*
5707 * We're accessing memory.
5708 */
5709#define IEMOP_XCHG_BYTE(a_fnWorker, a_Style) \
5710 IEM_MC_BEGIN(0, 0); \
5711 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5712 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5713 IEM_MC_LOCAL(uint8_t, uTmpReg); \
5714 IEM_MC_ARG(uint8_t *, pu8Mem, 0); \
5715 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1); \
5716 \
5717 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5718 IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
5719 IEM_MC_MEM_MAP_U8_##a_Style(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5720 IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5721 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker, pu8Mem, pu8Reg); \
5722 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Style(bUnmapInfo); \
5723 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5724 \
5725 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5726 IEM_MC_END()
5727
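 /* Note: xchg with a memory operand is implicitly locked on x86 even without
    a LOCK prefix, so the ATOMIC mapping is the default path here; the plain
    RW worker is only used when the VM is configured to disregard locking. */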
5728 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5729 {
5730 IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_locked, ATOMIC);
5731 }
5732 else
5733 {
5734 IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_unlocked, RW);
5735 }
5736 }
5737}
5738
5739
5740/**
5741 * @opcode 0x87
5742 */
5743FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
5744{
5745 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
5746 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5747
5748 /*
5749 * If rm is denoting a register, no more instruction bytes.
5750 */
5751 if (IEM_IS_MODRM_REG_MODE(bRm))
5752 {
5753 switch (pVCpu->iem.s.enmEffOpSize)
5754 {
5755 case IEMMODE_16BIT:
5756 IEM_MC_BEGIN(0, 0);
5757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5758 IEM_MC_LOCAL(uint16_t, uTmp1);
5759 IEM_MC_LOCAL(uint16_t, uTmp2);
5760
5761 IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5762 IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5763 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5764 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5765
5766 IEM_MC_ADVANCE_RIP_AND_FINISH();
5767 IEM_MC_END();
5768 break;
5769
5770 case IEMMODE_32BIT:
5771 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
5772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5773 IEM_MC_LOCAL(uint32_t, uTmp1);
5774 IEM_MC_LOCAL(uint32_t, uTmp2);
5775
5776 IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5777 IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5778 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5779 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5780
5781 IEM_MC_ADVANCE_RIP_AND_FINISH();
5782 IEM_MC_END();
5783 break;
5784
5785 case IEMMODE_64BIT:
5786 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
5787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5788 IEM_MC_LOCAL(uint64_t, uTmp1);
5789 IEM_MC_LOCAL(uint64_t, uTmp2);
5790
5791 IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5792 IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5793 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5794 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5795
5796 IEM_MC_ADVANCE_RIP_AND_FINISH();
5797 IEM_MC_END();
5798 break;
5799
5800 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5801 }
5802 }
5803 else
5804 {
5805 /*
5806 * We're accessing memory.
5807 */
5808#define IEMOP_XCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
5809 do { \
5810 switch (pVCpu->iem.s.enmEffOpSize) \
5811 { \
5812 case IEMMODE_16BIT: \
5813 IEM_MC_BEGIN(0, 0); \
5814 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5815 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5816 IEM_MC_LOCAL(uint16_t, uTmpReg); \
5817 IEM_MC_ARG(uint16_t *, pu16Mem, 0); \
5818 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1); \
5819 \
5820 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5821 IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
5822 IEM_MC_MEM_MAP_U16_##a_Type(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5823 IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5824 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker16, pu16Mem, pu16Reg); \
5825 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
5826 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5827 \
5828 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5829 IEM_MC_END(); \
5830 break; \
5831 \
5832 case IEMMODE_32BIT: \
5833 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
5834 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5835 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5836 IEM_MC_LOCAL(uint32_t, uTmpReg); \
5837 IEM_MC_ARG(uint32_t *, pu32Mem, 0); \
5838 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1); \
5839 \
5840 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5841 IEMOP_HLP_DONE_DECODING(); \
5842 IEM_MC_MEM_MAP_U32_##a_Type(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5843 IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5844 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker32, pu32Mem, pu32Reg); \
5845 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
5846 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5847 \
5848 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5849 IEM_MC_END(); \
5850 break; \
5851 \
5852 case IEMMODE_64BIT: \
5853 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
5854 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5855 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5856 IEM_MC_LOCAL(uint64_t, uTmpReg); \
5857 IEM_MC_ARG(uint64_t *, pu64Mem, 0); \
5858 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1); \
5859 \
5860 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5861 IEMOP_HLP_DONE_DECODING(); \
5862 IEM_MC_MEM_MAP_U64_##a_Type(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5863 IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5864 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker64, pu64Mem, pu64Reg); \
5865 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
5866 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5867 \
5868 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5869 IEM_MC_END(); \
5870 break; \
5871 \
5872 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5873 } \
5874 } while (0)
5875 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5876 {
5877 IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_locked, iemAImpl_xchg_u32_locked, iemAImpl_xchg_u64_locked, ATOMIC);
5878 }
5879 else
5880 {
5881 IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_unlocked, iemAImpl_xchg_u32_unlocked, iemAImpl_xchg_u64_unlocked, RW);
5882 }
5883 }
5884}
5885
5886
5887/**
5888 * @opcode 0x88
5889 */
5890FNIEMOP_DEF(iemOp_mov_Eb_Gb)
5891{
5892 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
5893
5894 uint8_t bRm;
5895 IEM_OPCODE_GET_NEXT_U8(&bRm);
5896
5897 /*
5898 * If rm is denoting a register, no more instruction bytes.
5899 */
5900 if (IEM_IS_MODRM_REG_MODE(bRm))
5901 {
5902 IEM_MC_BEGIN(0, 0);
5903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5904 IEM_MC_LOCAL(uint8_t, u8Value);
5905 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5906 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
5907 IEM_MC_ADVANCE_RIP_AND_FINISH();
5908 IEM_MC_END();
5909 }
5910 else
5911 {
5912 /*
5913 * We're writing a register to memory.
5914 */
5915 IEM_MC_BEGIN(0, 0);
5916 IEM_MC_LOCAL(uint8_t, u8Value);
5917 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5918 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5920 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5921 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
5922 IEM_MC_ADVANCE_RIP_AND_FINISH();
5923 IEM_MC_END();
5924 }
5925}
5926
5927
5928/**
5929 * @opcode 0x89
5930 */
5931FNIEMOP_DEF(iemOp_mov_Ev_Gv)
5932{
5933 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
5934
5935 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5936
5937 /*
5938 * If rm is denoting a register, no more instruction bytes.
5939 */
5940 if (IEM_IS_MODRM_REG_MODE(bRm))
5941 {
5942 switch (pVCpu->iem.s.enmEffOpSize)
5943 {
5944 case IEMMODE_16BIT:
5945 IEM_MC_BEGIN(0, 0);
5946 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5947 IEM_MC_LOCAL(uint16_t, u16Value);
5948 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5949 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
5950 IEM_MC_ADVANCE_RIP_AND_FINISH();
5951 IEM_MC_END();
5952 break;
5953
5954 case IEMMODE_32BIT:
5955 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
5956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5957 IEM_MC_LOCAL(uint32_t, u32Value);
5958 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5959 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
5960 IEM_MC_ADVANCE_RIP_AND_FINISH();
5961 IEM_MC_END();
5962 break;
5963
5964 case IEMMODE_64BIT:
5965 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
5966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5967 IEM_MC_LOCAL(uint64_t, u64Value);
5968 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5969 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
5970 IEM_MC_ADVANCE_RIP_AND_FINISH();
5971 IEM_MC_END();
5972 break;
5973
5974 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5975 }
5976 }
5977 else
5978 {
5979 /*
5980 * We're writing a register to memory.
5981 */
5982 switch (pVCpu->iem.s.enmEffOpSize)
5983 {
5984 case IEMMODE_16BIT:
5985 IEM_MC_BEGIN(0, 0);
5986 IEM_MC_LOCAL(uint16_t, u16Value);
5987 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5988 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5990 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5991 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
5992 IEM_MC_ADVANCE_RIP_AND_FINISH();
5993 IEM_MC_END();
5994 break;
5995
5996 case IEMMODE_32BIT:
5997 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
5998 IEM_MC_LOCAL(uint32_t, u32Value);
5999 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6000 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6001 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6002 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
6003 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
6004 IEM_MC_ADVANCE_RIP_AND_FINISH();
6005 IEM_MC_END();
6006 break;
6007
6008 case IEMMODE_64BIT:
6009 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6010 IEM_MC_LOCAL(uint64_t, u64Value);
6011 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6012 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6013 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6014 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
6015 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
6016 IEM_MC_ADVANCE_RIP_AND_FINISH();
6017 IEM_MC_END();
6018 break;
6019
6020 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6021 }
6022 }
6023}
6024
6025
6026/**
6027 * @opcode 0x8a
6028 */
6029FNIEMOP_DEF(iemOp_mov_Gb_Eb)
6030{
6031 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
6032
6033 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6034
6035 /*
6036 * If rm is denoting a register, no more instruction bytes.
6037 */
6038 if (IEM_IS_MODRM_REG_MODE(bRm))
6039 {
6040 IEM_MC_BEGIN(0, 0);
6041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6042 IEM_MC_LOCAL(uint8_t, u8Value);
6043 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
6044 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
6045 IEM_MC_ADVANCE_RIP_AND_FINISH();
6046 IEM_MC_END();
6047 }
6048 else
6049 {
6050 /*
6051 * We're loading a register from memory.
6052 */
6053 IEM_MC_BEGIN(0, 0);
6054 IEM_MC_LOCAL(uint8_t, u8Value);
6055 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6056 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6058 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6059 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
6060 IEM_MC_ADVANCE_RIP_AND_FINISH();
6061 IEM_MC_END();
6062 }
6063}
6064
6065
6066/**
6067 * @opcode 0x8b
6068 */
6069FNIEMOP_DEF(iemOp_mov_Gv_Ev)
6070{
6071 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
6072
6073 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6074
6075 /*
6076 * If rm is denoting a register, no more instruction bytes.
6077 */
6078 if (IEM_IS_MODRM_REG_MODE(bRm))
6079 {
6080 switch (pVCpu->iem.s.enmEffOpSize)
6081 {
6082 case IEMMODE_16BIT:
6083 IEM_MC_BEGIN(0, 0);
6084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6085 IEM_MC_LOCAL(uint16_t, u16Value);
6086 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
6087 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
6088 IEM_MC_ADVANCE_RIP_AND_FINISH();
6089 IEM_MC_END();
6090 break;
6091
6092 case IEMMODE_32BIT:
6093 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
6094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6095 IEM_MC_LOCAL(uint32_t, u32Value);
6096 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
6097 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
6098 IEM_MC_ADVANCE_RIP_AND_FINISH();
6099 IEM_MC_END();
6100 break;
6101
6102 case IEMMODE_64BIT:
6103 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6105 IEM_MC_LOCAL(uint64_t, u64Value);
6106 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
6107 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
6108 IEM_MC_ADVANCE_RIP_AND_FINISH();
6109 IEM_MC_END();
6110 break;
6111
6112 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6113 }
6114 }
6115 else
6116 {
6117 /*
6118 * We're loading a register from memory.
6119 */
6120 switch (pVCpu->iem.s.enmEffOpSize)
6121 {
6122 case IEMMODE_16BIT:
6123 IEM_MC_BEGIN(0, 0);
6124 IEM_MC_LOCAL(uint16_t, u16Value);
6125 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6128 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6129 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
6130 IEM_MC_ADVANCE_RIP_AND_FINISH();
6131 IEM_MC_END();
6132 break;
6133
6134 case IEMMODE_32BIT:
6135 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
6136 IEM_MC_LOCAL(uint32_t, u32Value);
6137 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6138 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6139 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6140 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6141 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
6142 IEM_MC_ADVANCE_RIP_AND_FINISH();
6143 IEM_MC_END();
6144 break;
6145
6146 case IEMMODE_64BIT:
6147 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6148 IEM_MC_LOCAL(uint64_t, u64Value);
6149 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6150 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6152 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6153 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
6154 IEM_MC_ADVANCE_RIP_AND_FINISH();
6155 IEM_MC_END();
6156 break;
6157
6158 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6159 }
6160 }
6161}
6162
6163
6164/**
6165 * @opcode 0x63
6166 * @todo Table fixme
6167 */
6168FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
6169{
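 /* Dispatch sketch (illustrative): outside 64-bit code 0x63 is ARPL. In
    64-bit code it is MOVSXD; with REX.W ('48 63 /r') it sign-extends Ev into
    the 64-bit Gv, while with a non-64-bit effective operand size the handling
    below simply falls back to mov Gv,Ev. */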
6170 if (!IEM_IS_64BIT_CODE(pVCpu))
6171 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
6172 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6173 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
6174 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
6175}
6176
6177
6178/**
6179 * @opcode 0x8c
6180 */
6181FNIEMOP_DEF(iemOp_mov_Ev_Sw)
6182{
6183 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
6184
6185 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6186
6187 /*
6188 * Check that the source segment register exists. The REX.R prefix is ignored.
6189 */
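 /* Segment register encoding refresher: reg field values 0..5 select ES, CS,
    SS, DS, FS and GS respectively; 6 and 7 name no segment register, hence
    the iSegReg > X86_SREG_GS (== 5) check below raises #UD. */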
6190 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
6191 if (iSegReg > X86_SREG_GS)
6192 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
6193
6194 /*
6195 * If rm is denoting a register, no more instruction bytes.
6196 * In that case, the operand size is respected and the upper bits are
6197 * cleared (starting with some Pentium models).
6198 */
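 /* Example (hedged): with DS=0x0023, a 32-bit 'mov eax, ds' stores 0x00000023
    on CPUs that clear the upper bits, hence the FETCH_SREG_ZX_U32/U64 below;
    earlier models reportedly left the upper bits as they were. */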
6199 if (IEM_IS_MODRM_REG_MODE(bRm))
6200 {
6201 switch (pVCpu->iem.s.enmEffOpSize)
6202 {
6203 case IEMMODE_16BIT:
6204 IEM_MC_BEGIN(0, 0);
6205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6206 IEM_MC_LOCAL(uint16_t, u16Value);
6207 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
6208 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
6209 IEM_MC_ADVANCE_RIP_AND_FINISH();
6210 IEM_MC_END();
6211 break;
6212
6213 case IEMMODE_32BIT:
6214 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
6215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6216 IEM_MC_LOCAL(uint32_t, u32Value);
6217 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
6218 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
6219 IEM_MC_ADVANCE_RIP_AND_FINISH();
6220 IEM_MC_END();
6221 break;
6222
6223 case IEMMODE_64BIT:
6224 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6226 IEM_MC_LOCAL(uint64_t, u64Value);
6227 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
6228 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
6229 IEM_MC_ADVANCE_RIP_AND_FINISH();
6230 IEM_MC_END();
6231 break;
6232
6233 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6234 }
6235 }
6236 else
6237 {
6238 /*
6239 * We're saving the register to memory. The access is word sized
6240 * regardless of operand size prefixes.
6241 */
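 /* Example: even with an operand size override the memory form stores only a
    word, e.g. 'mov [mem], ds' with a 66h prefix still writes just 2 bytes. */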
6242#if 0 /* not necessary */
6243 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
6244#endif
6245 IEM_MC_BEGIN(0, 0);
6246 IEM_MC_LOCAL(uint16_t, u16Value);
6247 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6248 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6250 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
6251 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
6252 IEM_MC_ADVANCE_RIP_AND_FINISH();
6253 IEM_MC_END();
6254 }
6255}
6256
6257
6258
6259
6260/**
6261 * @opcode 0x8d
6262 */
6263FNIEMOP_DEF(iemOp_lea_Gv_M)
6264{
6265 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
6266 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6267 if (IEM_IS_MODRM_REG_MODE(bRm))
6268 IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */
6269
6270 switch (pVCpu->iem.s.enmEffOpSize)
6271 {
6272 case IEMMODE_16BIT:
6273 IEM_MC_BEGIN(0, 0);
6274 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6275 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6276 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6277 /** @todo optimize: This value casting/masking can be skipped if addr-size ==
6278 * operand-size, which is usually the case. It'll save an instruction
6279 * and a register. */
6280 IEM_MC_LOCAL(uint16_t, u16Cast);
6281 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
6282 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
6283 IEM_MC_ADVANCE_RIP_AND_FINISH();
6284 IEM_MC_END();
6285 break;
6286
6287 case IEMMODE_32BIT:
6288 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
6289 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6290 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6292 /** @todo optimize: This value casting/masking can be skipped if addr-size ==
6293 * operand-size, which is usually the case. It'll save an instruction
6294 * and a register. */
6295 IEM_MC_LOCAL(uint32_t, u32Cast);
6296 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
6297 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
6298 IEM_MC_ADVANCE_RIP_AND_FINISH();
6299 IEM_MC_END();
6300 break;
6301
6302 case IEMMODE_64BIT:
6303 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6304 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6305 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6307 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
6308 IEM_MC_ADVANCE_RIP_AND_FINISH();
6309 IEM_MC_END();
6310 break;
6311
6312 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6313 }
6314}
6315
6316
6317/**
6318 * @opcode 0x8e
6319 */
6320FNIEMOP_DEF(iemOp_mov_Sw_Ev)
6321{
6322 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
6323
6324 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6325
6326 /*
6327 * The practical operand size is 16-bit.
6328 */
6329#if 0 /* not necessary */
6330 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
6331#endif
6332
6333 /*
6334 * Check that the destination register exists and can be used with this
6335 * instruction. The REX.R prefix is ignored.
6336 */
6337 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
6338 /** @todo r=bird: What does 8086 do here wrt CS? */
6339 if ( iSegReg == X86_SREG_CS
6340 || iSegReg > X86_SREG_GS)
6341 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
6342
6343 /*
6344 * If rm is denoting a register, no more instruction bytes.
6345 *
6346 * Note! Using IEMOP_MOV_SW_EV_REG_BODY here to specify different
6347 * IEM_CIMPL_F_XXX values depending on the CPU mode and target
6348 * register. This is a restriction of the current recompiler
6349 * approach.
6350 */
6351 if (IEM_IS_MODRM_REG_MODE(bRm))
6352 {
6353#define IEMOP_MOV_SW_EV_REG_BODY(a_fCImplFlags) \
6354 IEM_MC_BEGIN(0, a_fCImplFlags); \
6355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
6356 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
6357 IEM_MC_ARG(uint16_t, u16Value, 1); \
6358 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
6359 IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
6360 RT_BIT_64(kIemNativeGstReg_SegSelFirst + iSegReg) \
6361 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + iSegReg) \
6362 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + iSegReg) \
6363 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
6364 iemCImpl_load_SReg, iSRegArg, u16Value); \
6365 IEM_MC_END()
6366
6367 if (iSegReg == X86_SREG_SS)
6368 {
6369 if (IEM_IS_32BIT_CODE(pVCpu))
6370 {
6371 IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
6372 }
6373 else
6374 {
6375 IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
6376 }
6377 }
6378 else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
6379 {
6380 IEMOP_MOV_SW_EV_REG_BODY(0);
6381 }
6382 else
6383 {
6384 IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_MODE);
6385 }
6386#undef IEMOP_MOV_SW_EV_REG_BODY
6387 }
6388 else
6389 {
6390 /*
6391 * We're loading the register from memory. The access is word sized
6392 * regardless of operand size prefixes.
6393 */
6394#define IEMOP_MOV_SW_EV_MEM_BODY(a_fCImplFlags) \
6395 IEM_MC_BEGIN(0, a_fCImplFlags); \
6396 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
6397 IEM_MC_ARG(uint16_t, u16Value, 1); \
6398 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
6399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
6400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
6401 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
6402 IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
6403 RT_BIT_64(kIemNativeGstReg_SegSelFirst + iSegReg) \
6404 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + iSegReg) \
6405 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + iSegReg) \
6406 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
6407 iemCImpl_load_SReg, iSRegArg, u16Value); \
6408 IEM_MC_END()
6409
6410 if (iSegReg == X86_SREG_SS)
6411 {
6412 if (IEM_IS_32BIT_CODE(pVCpu))
6413 {
6414 IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
6415 }
6416 else
6417 {
6418 IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
6419 }
6420 }
6421 else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
6422 {
6423 IEMOP_MOV_SW_EV_MEM_BODY(0);
6424 }
6425 else
6426 {
6427 IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_MODE);
6428 }
6429#undef IEMOP_MOV_SW_EV_MEM_BODY
6430 }
6431}
6432
6433
6434/** Opcode 0x8f /0. */
6435FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
6436{
6437 /* This bugger is rather annoying as it requires rSP to be updated before
6438 doing the effective address calculations. Will eventually require a
6439 split between the R/M+SIB decoding and the effective address
6440 calculation - which is something that is required for any attempt at
6441 reusing this code for a recompiler. It may also be good to have if we
6442 need to delay #UD exception caused by invalid lock prefixes.
6443
6444 For now, we'll do a mostly safe interpreter-only implementation here. */
6445 /** @todo What's the deal with the 'reg' field and pop Ev? Ignoring it for
6446 * now until tests show it's checked. */
6447 IEMOP_MNEMONIC(pop_Ev, "pop Ev");
6448
6449 /* Register access is relatively easy and can share code. */
6450 if (IEM_IS_MODRM_REG_MODE(bRm))
6451 return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
6452
6453 /*
6454 * Memory target.
6455 *
6456 * Intel says that RSP is incremented before it's used in any effective
6457 * address calculations. This means some serious extra annoyance here since
6458 * we decode and calculate the effective address in one step and like to
6459 * delay committing registers till everything is done.
6460 *
6461 * So, we'll decode and calculate the effective address twice. This will
6462 * require some recoding if turned into a recompiler.
6463 */
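 /* Worked example (hedged): with RSP=0x1000, a 64-bit 'pop qword [rsp+8]'
    loads the value from 0x1000 and computes the destination EA with the
    already incremented stack pointer, i.e. 0x1008 + 8 = 0x1010. The
    2/4/8 << 8 values handed to IEM_MC_CALC_RM_EFF_ADDR below encode that
    RSP adjustment for the EA calculation. */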
6464 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */
6465
6466#if 1 /* This can be compiled, optimize later if needed. */
6467 switch (pVCpu->iem.s.enmEffOpSize)
6468 {
6469 case IEMMODE_16BIT:
6470 IEM_MC_BEGIN(0, 0);
6471 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
6472 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
6473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6474 IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
6475 IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
6476 IEM_MC_END();
6477 break;
6478
6479 case IEMMODE_32BIT:
6480 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
6481 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
6482 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
6483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6484 IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
6485 IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
6486 IEM_MC_END();
6487 break;
6488
6489 case IEMMODE_64BIT:
6490 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6491 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
6492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
6493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6494 IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
6495 IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
6496 IEM_MC_END();
6497 break;
6498
6499 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6500 }
6501
6502#else
6503# ifndef TST_IEM_CHECK_MC
6504 /* Calc effective address with modified ESP. */
6505/** @todo testcase */
6506 RTGCPTR GCPtrEff;
6507 VBOXSTRICTRC rcStrict;
6508 switch (pVCpu->iem.s.enmEffOpSize)
6509 {
6510 case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
6511 case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
6512 case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
6513 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6514 }
6515 if (rcStrict != VINF_SUCCESS)
6516 return rcStrict;
6517 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6518
6519 /* Perform the operation - this should be CImpl. */
6520 RTUINT64U TmpRsp;
6521 TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
6522 switch (pVCpu->iem.s.enmEffOpSize)
6523 {
6524 case IEMMODE_16BIT:
6525 {
6526 uint16_t u16Value;
6527 rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
6528 if (rcStrict == VINF_SUCCESS)
6529 rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
6530 break;
6531 }
6532
6533 case IEMMODE_32BIT:
6534 {
6535 uint32_t u32Value;
6536 rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
6537 if (rcStrict == VINF_SUCCESS)
6538 rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
6539 break;
6540 }
6541
6542 case IEMMODE_64BIT:
6543 {
6544 uint64_t u64Value;
6545 rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
6546 if (rcStrict == VINF_SUCCESS)
6547 rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
6548 break;
6549 }
6550
6551 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6552 }
6553 if (rcStrict == VINF_SUCCESS)
6554 {
6555 pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
6556 return iemRegUpdateRipAndFinishClearingRF(pVCpu);
6557 }
6558 return rcStrict;
6559
6560# else
6561 return VERR_IEM_IPE_2;
6562# endif
6563#endif
6564}
6565
6566
6567/**
6568 * @opcode 0x8f
6569 */
6570FNIEMOP_DEF(iemOp_Grp1A__xop)
6571{
6572 /*
6573 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
6574 * three byte VEX prefix, except that the mmmmm field cannot have the values
6575 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
6576 */
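 /* Field sketch derived from the decoding below (mirrors the 3-byte VEX
    layout):
        byte 1 (bRm):   bits 7:5 = R,X,B (stored inverted),
                        bits 4:0 = mmmmm map select (8, 9 or 10 for XOP)
        byte 2 (bXop2): bit 7 = W, bits 6:3 = vvvv (inverted),
                        bit 2 = L, bits 1:0 = pp
    E.g. a hypothetical sequence 8F E9 78 .. selects map 9 with all extension
    bits clear, no third operand register, L=0 and pp=0 (no implied 66/F3/F2
    prefix). */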
6577 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6578 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
6579 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
6580
6581 IEMOP_MNEMONIC(xop, "xop");
6582 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
6583 {
6584 /** @todo Test when exactly the XOP conformance checks kick in during
6585 * instruction decoding and fetching (using \#PF). */
6586 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
6587 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6588 if ( ( pVCpu->iem.s.fPrefixes
6589 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
6590 == 0)
6591 {
6592 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
6593 if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
6594 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
6595 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
6596 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
6597 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
6598 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
6599 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
6600 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
6601
6602 /** @todo XOP: Just use new tables and decoders. */
6603 switch (bRm & 0x1f)
6604 {
6605 case 8: /* xop opcode map 8. */
6606 IEMOP_BITCH_ABOUT_STUB();
6607 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6608
6609 case 9: /* xop opcode map 9. */
6610 IEMOP_BITCH_ABOUT_STUB();
6611 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6612
6613 case 10: /* xop opcode map 10. */
6614 IEMOP_BITCH_ABOUT_STUB();
6615 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6616
6617 default:
6618 Log(("XOP: Invalid mmmmm value: %#x!\n", bRm & 0x1f));
6619 IEMOP_RAISE_INVALID_OPCODE_RET();
6620 }
6621 }
6622 else
6623 Log(("XOP: Invalid prefix mix!\n"));
6624 }
6625 else
6626 Log(("XOP: XOP support disabled!\n"));
6627 IEMOP_RAISE_INVALID_OPCODE_RET();
6628}
6629
6630
6631/**
6632 * Common 'xchg reg,rAX' helper.
6633 */
6634FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
6635{
6636 iReg |= pVCpu->iem.s.uRexB;
6637 switch (pVCpu->iem.s.enmEffOpSize)
6638 {
6639 case IEMMODE_16BIT:
6640 IEM_MC_BEGIN(0, 0);
6641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6642 IEM_MC_LOCAL(uint16_t, u16Tmp1);
6643 IEM_MC_LOCAL(uint16_t, u16Tmp2);
6644 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
6645 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
6646 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
6647 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
6648 IEM_MC_ADVANCE_RIP_AND_FINISH();
6649 IEM_MC_END();
6650 break;
6651
6652 case IEMMODE_32BIT:
6653 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
6654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6655 IEM_MC_LOCAL(uint32_t, u32Tmp1);
6656 IEM_MC_LOCAL(uint32_t, u32Tmp2);
6657 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
6658 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
6659 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
6660 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
6661 IEM_MC_ADVANCE_RIP_AND_FINISH();
6662 IEM_MC_END();
6663 break;
6664
6665 case IEMMODE_64BIT:
6666 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6668 IEM_MC_LOCAL(uint64_t, u64Tmp1);
6669 IEM_MC_LOCAL(uint64_t, u64Tmp2);
6670 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
6671 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
6672 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
6673 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
6674 IEM_MC_ADVANCE_RIP_AND_FINISH();
6675 IEM_MC_END();
6676 break;
6677
6678 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6679 }
6680}
6681
6682
6683/**
6684 * @opcode 0x90
6685 */
6686FNIEMOP_DEF(iemOp_nop)
6687{
6688 /* R8/R8D and RAX/EAX can be exchanged. */
6689 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
6690 {
6691 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
6692 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
6693 }
6694
6695 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
6696 {
6697 IEMOP_MNEMONIC(pause, "pause");
6698 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
6699 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
6700 if (!IEM_IS_IN_GUEST(pVCpu))
6701 { /* probable */ }
6702#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6703 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
6704 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmx_pause);
6705#endif
6706#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
6707 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
6708 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_svm_pause);
6709#endif
6710 }
6711 else
6712 IEMOP_MNEMONIC(nop, "nop");
6713 /** @todo testcase: lock nop; lock pause */
6714 IEM_MC_BEGIN(0, 0);
6715 IEMOP_HLP_DONE_DECODING();
6716 IEM_MC_ADVANCE_RIP_AND_FINISH();
6717 IEM_MC_END();
6718}
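/*
 * Illustrative encodings (not from the source) of the special 0x90 forms the
 * checks above distinguish in 64-bit code:
 *   90       nop            (xchg eax,eax would zero bits 63:32, so plain 90 stays a true no-op)
 *   41 90    xchg r8d,eax   (REX.B re-enables the exchange meaning)
 *   F3 90    pause
 */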
6719
6720
6721/**
6722 * @opcode 0x91
6723 */
6724FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
6725{
6726 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
6727 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
6728}
6729
6730
6731/**
6732 * @opcode 0x92
6733 */
6734FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
6735{
6736 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
6737 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
6738}
6739
6740
6741/**
6742 * @opcode 0x93
6743 */
6744FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
6745{
6746 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
6747 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
6748}
6749
6750
6751/**
6752 * @opcode 0x94
6753 */
6754FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6755{
6756     IEMOP_MNEMONIC(xchg_rSP_rAX, "xchg rSP,rAX");
6757 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6758}
6759
6760
6761/**
6762 * @opcode 0x95
6763 */
6764FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
6765{
6766 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
6767 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
6768}
6769
6770
6771/**
6772 * @opcode 0x96
6773 */
6774FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
6775{
6776 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
6777 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
6778}
6779
6780
6781/**
6782 * @opcode 0x97
6783 */
6784FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
6785{
6786 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
6787 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
6788}
6789
6790
6791/**
6792 * @opcode 0x98
6793 */
6794FNIEMOP_DEF(iemOp_cbw)
6795{
6796 switch (pVCpu->iem.s.enmEffOpSize)
6797 {
6798 case IEMMODE_16BIT:
6799 IEMOP_MNEMONIC(cbw, "cbw");
6800 IEM_MC_BEGIN(0, 0);
6801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6802 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
6803 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
6804 } IEM_MC_ELSE() {
6805 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
6806 } IEM_MC_ENDIF();
6807 IEM_MC_ADVANCE_RIP_AND_FINISH();
6808 IEM_MC_END();
6809 break;
6810
6811 case IEMMODE_32BIT:
6812 IEMOP_MNEMONIC(cwde, "cwde");
6813 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
6814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6815 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6816 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
6817 } IEM_MC_ELSE() {
6818 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
6819 } IEM_MC_ENDIF();
6820 IEM_MC_ADVANCE_RIP_AND_FINISH();
6821 IEM_MC_END();
6822 break;
6823
6824 case IEMMODE_64BIT:
6825 IEMOP_MNEMONIC(cdqe, "cdqe");
6826 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6828 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6829 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
6830 } IEM_MC_ELSE() {
6831 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
6832 } IEM_MC_ENDIF();
6833 IEM_MC_ADVANCE_RIP_AND_FINISH();
6834 IEM_MC_END();
6835 break;
6836
6837 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6838 }
6839}
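/*
 * Illustrative sketch (not part of IEM): the bit-test plus OR/AND pattern above
 * is just a sign extension of the lower half of the accumulator in place.
 * Hypothetical name; plain C over the raw RAX value, <stdint.h> types assumed.
 */
#if 0 /* example only */
static uint64_t cbwFamilyExample(uint64_t uRax, int cEffOpBits)
{
    switch (cEffOpBits)
    {
        case 16: /* cbw:  AX = sign-extended AL, bits 63:16 preserved. */
            return (uRax & ~UINT64_C(0xffff)) | (uint16_t)(int16_t)(int8_t)(uint8_t)uRax;
        case 32: /* cwde: EAX = sign-extended AX; a 32-bit write clears bits 63:32 in 64-bit mode. */
            return (uint32_t)(int32_t)(int16_t)(uint16_t)uRax;
        default: /* cdqe: RAX = sign-extended EAX. */
            return (uint64_t)(int64_t)(int32_t)(uint32_t)uRax;
    }
}
#endif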
6840
6841
6842/**
6843 * @opcode 0x99
6844 */
6845FNIEMOP_DEF(iemOp_cwd)
6846{
6847 switch (pVCpu->iem.s.enmEffOpSize)
6848 {
6849 case IEMMODE_16BIT:
6850 IEMOP_MNEMONIC(cwd, "cwd");
6851 IEM_MC_BEGIN(0, 0);
6852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6853 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6854 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
6855 } IEM_MC_ELSE() {
6856 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
6857 } IEM_MC_ENDIF();
6858 IEM_MC_ADVANCE_RIP_AND_FINISH();
6859 IEM_MC_END();
6860 break;
6861
6862 case IEMMODE_32BIT:
6863 IEMOP_MNEMONIC(cdq, "cdq");
6864 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
6865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6866 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6867 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
6868 } IEM_MC_ELSE() {
6869 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
6870 } IEM_MC_ENDIF();
6871 IEM_MC_ADVANCE_RIP_AND_FINISH();
6872 IEM_MC_END();
6873 break;
6874
6875 case IEMMODE_64BIT:
6876 IEMOP_MNEMONIC(cqo, "cqo");
6877 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6879 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
6880 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
6881 } IEM_MC_ELSE() {
6882 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
6883 } IEM_MC_ENDIF();
6884 IEM_MC_ADVANCE_RIP_AND_FINISH();
6885 IEM_MC_END();
6886 break;
6887
6888 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6889 }
6890}
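/*
 * Illustrative sketch (not part of IEM): cwd/cdq/cqo replicate the sign bit of
 * the accumulator into the D register, i.e. an arithmetic shift right by
 * width-1 bits. Returns only the value written to DX/EDX/RDX; merging it into
 * rDX per the usual partial-register rules is elided. Hypothetical name.
 */
#if 0 /* example only */
static uint64_t cwdFamilyExample(uint64_t uRax, int cEffOpBits)
{
    if (cEffOpBits == 16)
        return (uint16_t)((int16_t)(uint16_t)uRax >> 15);   /* cwd */
    if (cEffOpBits == 32)
        return (uint32_t)((int32_t)(uint32_t)uRax >> 31);   /* cdq */
    return (uint64_t)((int64_t)uRax >> 63);                 /* cqo */
}
#endif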
6891
6892
6893/**
6894 * @opcode 0x9a
6895 */
6896FNIEMOP_DEF(iemOp_call_Ap)
6897{
6898 IEMOP_MNEMONIC(call_Ap, "call Ap");
6899 IEMOP_HLP_NO_64BIT();
6900
6901 /* Decode the far pointer address and pass it on to the far call C implementation. */
6902 uint32_t off32Seg;
6903 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
6904 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
6905 else
6906 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
6907 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
6908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6909 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
6910 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
6911 iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
6912 /** @todo make task-switches, ring-switches, ++ return non-zero status */
6913}
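/*
 * Illustrative encodings (not from the source): the far pointer is stored
 * offset-first, selector-last, with the offset width following the operand
 * size:
 *   9A 78 56 34 12          call 1234:5678       (o16: ptr16:16)
 *   9A 78 56 34 12 CD AB    call ABCD:12345678   (o32: ptr16:32)
 */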
6914
6915
6916/** Opcode 0x9b. (aka fwait) */
6917FNIEMOP_DEF(iemOp_wait)
6918{
6919 IEMOP_MNEMONIC(wait, "wait");
6920 IEM_MC_BEGIN(0, 0);
6921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6922 IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
6923 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6924 IEM_MC_ADVANCE_RIP_AND_FINISH();
6925 IEM_MC_END();
6926}
6927
6928
6929/**
6930 * @opcode 0x9c
6931 */
6932FNIEMOP_DEF(iemOp_pushf_Fv)
6933{
6934 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
6935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6936 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6937 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
6938 iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
6939}
6940
6941
6942/**
6943 * @opcode 0x9d
6944 */
6945FNIEMOP_DEF(iemOp_popf_Fv)
6946{
6947 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
6948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6949 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6950 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
6951 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
6952 iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
6953}
6954
6955
6956/**
6957 * @opcode 0x9e
6958 * @opflmodify cf,pf,af,zf,sf
6959 */
6960FNIEMOP_DEF(iemOp_sahf)
6961{
6962 IEMOP_MNEMONIC(sahf, "sahf");
6963 if ( IEM_IS_64BIT_CODE(pVCpu)
6964 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6965 IEMOP_RAISE_INVALID_OPCODE_RET();
6966 IEM_MC_BEGIN(0, 0);
6967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6968 IEM_MC_LOCAL(uint32_t, u32Flags);
6969 IEM_MC_LOCAL(uint32_t, EFlags);
6970 IEM_MC_FETCH_EFLAGS(EFlags);
6971 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
6972 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6973 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
6974 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
6975 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
6976 IEM_MC_COMMIT_EFLAGS(EFlags);
6977 IEM_MC_ADVANCE_RIP_AND_FINISH();
6978 IEM_MC_END();
6979}
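/*
 * Illustrative sketch (not part of IEM): SAHF in plain C. Only SF/ZF/AF/PF/CF
 * are taken from AH, bit 1 is forced to 1 and the flag bits above bit 7 are
 * kept. Hypothetical names; masks are the architectural EFLAGS bit positions.
 */
#if 0 /* example only */
static uint32_t sahfExample(uint32_t fEFlags, uint8_t bAh)
{
    uint32_t const fMask = 0x80 /*SF*/ | 0x40 /*ZF*/ | 0x10 /*AF*/ | 0x04 /*PF*/ | 0x01 /*CF*/;
    return (fEFlags & ~UINT32_C(0xff)) | (bAh & fMask) | 0x02 /* always-one bit */;
}
#endif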
6980
6981
6982/**
6983 * @opcode 0x9f
6984 * @opfltest cf,pf,af,zf,sf
6985 */
6986FNIEMOP_DEF(iemOp_lahf)
6987{
6988 IEMOP_MNEMONIC(lahf, "lahf");
6989 if ( IEM_IS_64BIT_CODE(pVCpu)
6990 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6991 IEMOP_RAISE_INVALID_OPCODE_RET();
6992 IEM_MC_BEGIN(0, 0);
6993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6994 IEM_MC_LOCAL(uint8_t, u8Flags);
6995 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
6996 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
6997 IEM_MC_ADVANCE_RIP_AND_FINISH();
6998 IEM_MC_END();
6999}
7000
7001
7002/**
7003 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
7004  * iemOp_mov_Ov_rAX to fetch the moffsXX part of the instruction.
7005 * Will return/throw on failures.
7006 * @param a_GCPtrMemOff The variable to store the offset in.
7007 */
7008#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
7009 do \
7010 { \
7011 switch (pVCpu->iem.s.enmEffAddrMode) \
7012 { \
7013 case IEMMODE_16BIT: \
7014 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
7015 break; \
7016 case IEMMODE_32BIT: \
7017 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
7018 break; \
7019 case IEMMODE_64BIT: \
7020 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
7021 break; \
7022 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
7023 } \
7024 } while (0)
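/*
 * Illustrative encodings (not from the source): the moffs operand is a bare
 * offset whose width follows the effective address size, with no modrm byte:
 *   67 A0 34 12                  mov al,[0x1234]             (a16 in 32-bit code)
 *   A0 78 56 34 12               mov al,[0x12345678]         (a32 in 32-bit code)
 *   A0 F0 DE BC 9A 78 56 34 12   mov al,[0x123456789abcdef0] (a64 in 64-bit code)
 */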
7025
7026/**
7027 * @opcode 0xa0
7028 */
7029FNIEMOP_DEF(iemOp_mov_AL_Ob)
7030{
7031 /*
7032 * Get the offset.
7033 */
7034 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
7035 RTGCPTR GCPtrMemOffDecode;
7036 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
7037
7038 /*
7039 * Fetch AL.
7040 */
7041 IEM_MC_BEGIN(0, 0);
7042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7043 IEM_MC_LOCAL(uint8_t, u8Tmp);
7044 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7045 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
7046 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
7047 IEM_MC_ADVANCE_RIP_AND_FINISH();
7048 IEM_MC_END();
7049}
7050
7051
7052/**
7053 * @opcode 0xa1
7054 */
7055FNIEMOP_DEF(iemOp_mov_rAX_Ov)
7056{
7057 /*
7058 * Get the offset.
7059 */
7060 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
7061 RTGCPTR GCPtrMemOffDecode;
7062 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
7063
7064 /*
7065 * Fetch rAX.
7066 */
7067 switch (pVCpu->iem.s.enmEffOpSize)
7068 {
7069 case IEMMODE_16BIT:
7070 IEM_MC_BEGIN(0, 0);
7071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7072 IEM_MC_LOCAL(uint16_t, u16Tmp);
7073 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7074 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
7075 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
7076 IEM_MC_ADVANCE_RIP_AND_FINISH();
7077 IEM_MC_END();
7078 break;
7079
7080 case IEMMODE_32BIT:
7081 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7083 IEM_MC_LOCAL(uint32_t, u32Tmp);
7084 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7085 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
7086 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
7087 IEM_MC_ADVANCE_RIP_AND_FINISH();
7088 IEM_MC_END();
7089 break;
7090
7091 case IEMMODE_64BIT:
7092 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7094 IEM_MC_LOCAL(uint64_t, u64Tmp);
7095 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7096 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
7097 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
7098 IEM_MC_ADVANCE_RIP_AND_FINISH();
7099 IEM_MC_END();
7100 break;
7101
7102 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7103 }
7104}
7105
7106
7107/**
7108 * @opcode 0xa2
7109 */
7110FNIEMOP_DEF(iemOp_mov_Ob_AL)
7111{
7112 /*
7113 * Get the offset.
7114 */
7115 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
7116 RTGCPTR GCPtrMemOffDecode;
7117 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
7118
7119 /*
7120 * Store AL.
7121 */
7122 IEM_MC_BEGIN(0, 0);
7123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7124 IEM_MC_LOCAL(uint8_t, u8Tmp);
7125 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
7126 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7127 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
7128 IEM_MC_ADVANCE_RIP_AND_FINISH();
7129 IEM_MC_END();
7130}
7131
7132
7133/**
7134 * @opcode 0xa3
7135 */
7136FNIEMOP_DEF(iemOp_mov_Ov_rAX)
7137{
7138 /*
7139 * Get the offset.
7140 */
7141 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
7142 RTGCPTR GCPtrMemOffDecode;
7143 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
7144
7145 /*
7146 * Store rAX.
7147 */
7148 switch (pVCpu->iem.s.enmEffOpSize)
7149 {
7150 case IEMMODE_16BIT:
7151 IEM_MC_BEGIN(0, 0);
7152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7153 IEM_MC_LOCAL(uint16_t, u16Tmp);
7154 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
7155 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7156 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
7157 IEM_MC_ADVANCE_RIP_AND_FINISH();
7158 IEM_MC_END();
7159 break;
7160
7161 case IEMMODE_32BIT:
7162 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7164 IEM_MC_LOCAL(uint32_t, u32Tmp);
7165 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
7166 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7167 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
7168 IEM_MC_ADVANCE_RIP_AND_FINISH();
7169 IEM_MC_END();
7170 break;
7171
7172 case IEMMODE_64BIT:
7173 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7175 IEM_MC_LOCAL(uint64_t, u64Tmp);
7176 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
7177 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7178 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
7179 IEM_MC_ADVANCE_RIP_AND_FINISH();
7180 IEM_MC_END();
7181 break;
7182
7183 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7184 }
7185}
7186
7187/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv */
7188#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
7189 IEM_MC_BEGIN(a_fMcFlags, 0); \
7190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
7191 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
7192 IEM_MC_LOCAL(RTGCPTR, uAddr); \
7193 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
7194 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
7195 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
7196 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
7197 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
7198 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
7199 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
7200 } IEM_MC_ELSE() { \
7201 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
7202 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
7203 } IEM_MC_ENDIF(); \
7204 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
7205 IEM_MC_END() \
7206
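/*
 * Illustrative sketch (not part of IEM): one non-rep MOVSB step in plain C over
 * a flat guest-memory array. Hypothetical names; segmentation, paging and
 * address-size wrap-around are deliberately ignored.
 */
#if 0 /* example only */
static void movsbStepExample(uint8_t *pbMem, uint64_t *puSi, uint64_t *puDi, uint32_t fEFlags)
{
    pbMem[*puDi] = pbMem[*puSi];                            /* ES:rDI <- DS:rSI */
    uint64_t const cbStep = (fEFlags & 0x400 /*DF*/) ? (uint64_t)-1 : 1;
    *puSi += cbStep;                                        /* both index registers step by the */
    *puDi += cbStep;                                        /* element size, direction per EFLAGS.DF */
}
#endif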
7207/**
7208 * @opcode 0xa4
7209 * @opfltest df
7210 */
7211FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
7212{
7213 /*
7214 * Use the C implementation if a repeat prefix is encountered.
7215 */
7216 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7217 {
7218 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
7219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7220 switch (pVCpu->iem.s.enmEffAddrMode)
7221 {
7222 case IEMMODE_16BIT:
7223 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7224 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7225 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7226 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7227 iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
7228 case IEMMODE_32BIT:
7229 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7230 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7231 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7232 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7233 iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
7234 case IEMMODE_64BIT:
7235 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7236 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7237 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7238 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7239 iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
7240 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7241 }
7242 }
7243
7244 /*
7245 * Sharing case implementation with movs[wdq] below.
7246 */
7247 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
7248 switch (pVCpu->iem.s.enmEffAddrMode)
7249 {
7250 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7251 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7252 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
7253 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7254 }
7255}
7256
7257
7258/**
7259 * @opcode 0xa5
7260 * @opfltest df
7261 */
7262FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
7263{
7264
7265 /*
7266 * Use the C implementation if a repeat prefix is encountered.
7267 */
7268 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7269 {
7270 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
7271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7272 switch (pVCpu->iem.s.enmEffOpSize)
7273 {
7274 case IEMMODE_16BIT:
7275 switch (pVCpu->iem.s.enmEffAddrMode)
7276 {
7277 case IEMMODE_16BIT:
7278 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7279 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7280 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7281 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7282 iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
7283 case IEMMODE_32BIT:
7284 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7285 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7286 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7287 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7288 iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
7289 case IEMMODE_64BIT:
7290 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7291 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7292 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7293 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7294 iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
7295 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7296 }
7297 break;
7298 case IEMMODE_32BIT:
7299 switch (pVCpu->iem.s.enmEffAddrMode)
7300 {
7301 case IEMMODE_16BIT:
7302 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7303 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7304 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7305 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7306 iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
7307 case IEMMODE_32BIT:
7308 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7309 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7310 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7311 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7312 iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
7313 case IEMMODE_64BIT:
7314 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7315 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7316 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7317 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7318 iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
7319 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7320 }
7321 case IEMMODE_64BIT:
7322 switch (pVCpu->iem.s.enmEffAddrMode)
7323 {
7324 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
7325 case IEMMODE_32BIT:
7326 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7327 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7328 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7329 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7330 iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
7331 case IEMMODE_64BIT:
7332 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7333 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7334 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7335 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7336 iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
7337 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7338 }
7339 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7340 }
7341 }
7342
7343 /*
7344 * Annoying double switch here.
7345 * Using ugly macro for implementing the cases, sharing it with movsb.
7346 */
7347 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
7348 switch (pVCpu->iem.s.enmEffOpSize)
7349 {
7350 case IEMMODE_16BIT:
7351 switch (pVCpu->iem.s.enmEffAddrMode)
7352 {
7353 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7354 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7355 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
7356 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7357 }
7358 break;
7359
7360 case IEMMODE_32BIT:
7361 switch (pVCpu->iem.s.enmEffAddrMode)
7362 {
7363 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7364 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7365 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
7366 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7367 }
7368 break;
7369
7370 case IEMMODE_64BIT:
7371 switch (pVCpu->iem.s.enmEffAddrMode)
7372 {
7373 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7374 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
7375 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
7376 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7377 }
7378 break;
7379 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7380 }
7381}
7382
7383#undef IEM_MOVS_CASE
7384
7385/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv */
7386#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
7387 IEM_MC_BEGIN(a_fMcFlags, 0); \
7388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
7389 \
7390 IEM_MC_LOCAL(RTGCPTR, uAddr1); \
7391 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr1, X86_GREG_xSI); \
7392 IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
7393 IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr1); \
7394 \
7395 IEM_MC_LOCAL(RTGCPTR, uAddr2); \
7396 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr2, X86_GREG_xDI); \
7397 IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
7398 IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr2); \
7399 \
7400 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
7401 IEM_MC_REF_EFLAGS(pEFlags); \
7402 IEM_MC_ARG_LOCAL_REF(uint##ValBits##_t *, puValue1, uValue1, 0); \
7403 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
7404 \
7405 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
7406 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
7407 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
7408 } IEM_MC_ELSE() { \
7409 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
7410 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
7411 } IEM_MC_ENDIF(); \
7412 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
7413 IEM_MC_END() \
7414
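/*
 * Illustrative sketch (not part of IEM): one CMPSB step compares DS:rSI with
 * ES:rDI (first operand minus second, like CMP) and only updates the index
 * registers; with REPE/REPNE the loop additionally stops on ZF/!ZF.
 * Hypothetical names, flat memory, only ZF shown.
 */
#if 0 /* example only */
static bool cmpsbStepExample(uint8_t const *pbMem, uint64_t *puSi, uint64_t *puDi,
                             uint32_t *pfEFlags, bool fRepe)
{
    bool const fZf = pbMem[*puSi] == pbMem[*puDi];
    *pfEFlags = (*pfEFlags & ~UINT32_C(0x40)) | (fZf ? UINT32_C(0x40) /*ZF*/ : 0); /* CF/OF/SF/AF/PF elided */
    uint64_t const cbStep = (*pfEFlags & 0x400 /*DF*/) ? (uint64_t)-1 : 1;
    *puSi += cbStep;
    *puDi += cbStep;
    return fRepe ? fZf : !fZf; /* true = keep looping (rCX handling elided) */
}
#endif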
7415/**
7416 * @opcode 0xa6
7417 * @opflclass arithmetic
7418 * @opfltest df
7419 */
7420FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
7421{
7422
7423 /*
7424 * Use the C implementation if a repeat prefix is encountered.
7425 */
7426 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7427 {
7428 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
7429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7430 switch (pVCpu->iem.s.enmEffAddrMode)
7431 {
7432 case IEMMODE_16BIT:
7433 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7434 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7435 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7436 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7437 iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
7438 case IEMMODE_32BIT:
7439 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7440 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7441 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7442 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7443 iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
7444 case IEMMODE_64BIT:
7445 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7446 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7447 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7448 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7449 iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
7450 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7451 }
7452 }
7453 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7454 {
7455 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
7456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7457 switch (pVCpu->iem.s.enmEffAddrMode)
7458 {
7459 case IEMMODE_16BIT:
7460 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7461 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7462 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7463 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7464 iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
7465 case IEMMODE_32BIT:
7466 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7467 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7468 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7469 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7470 iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
7471 case IEMMODE_64BIT:
7472 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7473 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7474 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7475 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7476 iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
7477 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7478 }
7479 }
7480
7481 /*
7482 * Sharing case implementation with cmps[wdq] below.
7483 */
7484 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
7485 switch (pVCpu->iem.s.enmEffAddrMode)
7486 {
7487 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7488 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7489 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
7490 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7491 }
7492}
7493
7494
7495/**
7496 * @opcode 0xa7
7497 * @opflclass arithmetic
7498 * @opfltest df
7499 */
7500FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
7501{
7502 /*
7503 * Use the C implementation if a repeat prefix is encountered.
7504 */
7505 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7506 {
7507 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
7508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7509 switch (pVCpu->iem.s.enmEffOpSize)
7510 {
7511 case IEMMODE_16BIT:
7512 switch (pVCpu->iem.s.enmEffAddrMode)
7513 {
7514 case IEMMODE_16BIT:
7515 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7516 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7517 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7518 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7519 iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7520 case IEMMODE_32BIT:
7521 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7522 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7523 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7524 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7525 iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7526 case IEMMODE_64BIT:
7527 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7528 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7529 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7530 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7531 iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7532 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7533 }
7534 break;
7535 case IEMMODE_32BIT:
7536 switch (pVCpu->iem.s.enmEffAddrMode)
7537 {
7538 case IEMMODE_16BIT:
7539 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7540 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7541 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7542 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7543 iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7544 case IEMMODE_32BIT:
7545 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7546 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7547 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7548 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7549 iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7550 case IEMMODE_64BIT:
7551 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7552 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7553 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7554 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7555 iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7556 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7557 }
7558 case IEMMODE_64BIT:
7559 switch (pVCpu->iem.s.enmEffAddrMode)
7560 {
7561 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
7562 case IEMMODE_32BIT:
7563 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7564 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7565 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7566 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7567 iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7568 case IEMMODE_64BIT:
7569 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7570 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7571 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7572 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7573 iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7574 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7575 }
7576 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7577 }
7578 }
7579
7580 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7581 {
7582 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
7583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7584 switch (pVCpu->iem.s.enmEffOpSize)
7585 {
7586 case IEMMODE_16BIT:
7587 switch (pVCpu->iem.s.enmEffAddrMode)
7588 {
7589 case IEMMODE_16BIT:
7590 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7591 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7592 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7593 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7594 iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7595 case IEMMODE_32BIT:
7596 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7597 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7598 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7599 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7600 iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7601 case IEMMODE_64BIT:
7602 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7603 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7604 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7605 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7606 iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7607 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7608 }
7609 break;
7610 case IEMMODE_32BIT:
7611 switch (pVCpu->iem.s.enmEffAddrMode)
7612 {
7613 case IEMMODE_16BIT:
7614 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7615 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7616 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7617 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7618 iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7619 case IEMMODE_32BIT:
7620 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7621 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7622 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7623 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7624 iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7625 case IEMMODE_64BIT:
7626 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7627 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7628 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7629 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7630 iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7631 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7632 }
7633 case IEMMODE_64BIT:
7634 switch (pVCpu->iem.s.enmEffAddrMode)
7635 {
7636 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
7637 case IEMMODE_32BIT:
7638 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7639 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7640 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7641 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7642 iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7643 case IEMMODE_64BIT:
7644 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7645 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7646 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7647 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7648 iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7649 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7650 }
7651 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7652 }
7653 }
7654
7655 /*
7656 * Annoying double switch here.
7657 * Using ugly macro for implementing the cases, sharing it with cmpsb.
7658 */
7659 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
7660 switch (pVCpu->iem.s.enmEffOpSize)
7661 {
7662 case IEMMODE_16BIT:
7663 switch (pVCpu->iem.s.enmEffAddrMode)
7664 {
7665 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7666 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7667 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
7668 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7669 }
7670 break;
7671
7672 case IEMMODE_32BIT:
7673 switch (pVCpu->iem.s.enmEffAddrMode)
7674 {
7675 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7676 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7677 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
7678 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7679 }
7680 break;
7681
7682 case IEMMODE_64BIT:
7683 switch (pVCpu->iem.s.enmEffAddrMode)
7684 {
7685 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7686             case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_64BIT); break;
7687 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
7688 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7689 }
7690 break;
7691 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7692 }
7693}
7694
7695#undef IEM_CMPS_CASE
7696
7697/**
7698 * @opcode 0xa8
7699 * @opflclass logical
7700 */
7701FNIEMOP_DEF(iemOp_test_AL_Ib)
7702{
7703 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
7704 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7705 IEMOP_BODY_BINARY_AL_Ib(test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
7706}
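/*
 * Illustrative sketch (not part of IEM): TEST is AND without a writeback, so
 * only EFLAGS change; AF is architecturally undefined here, which is what the
 * IEMOP_VERIFICATION_UNDEFINED_EFLAGS hint above records. Hypothetical name,
 * only ZF shown.
 */
#if 0 /* example only */
static uint32_t testAlIbExampleZf(uint8_t bAl, uint8_t bImm)
{
    return (uint8_t)(bAl & bImm) == 0 ? UINT32_C(0x40) /*ZF*/ : 0;
}
#endif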
7707
7708
7709/**
7710 * @opcode 0xa9
7711 * @opflclass logical
7712 */
7713FNIEMOP_DEF(iemOp_test_eAX_Iz)
7714{
7715 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
7716 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7717 IEMOP_BODY_BINARY_rAX_Iz_RO(test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
7718}
7719
7720
7721/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX */
7722#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
7723 IEM_MC_BEGIN(a_fMcFlags, 0); \
7724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
7725 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
7726 IEM_MC_LOCAL(RTGCPTR, uAddr); \
7727 IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
7728 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
7729 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
7730 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
7731 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
7732 } IEM_MC_ELSE() { \
7733 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
7734 } IEM_MC_ENDIF(); \
7735 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
7736 IEM_MC_END() \
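/*
 * Illustrative sketch (not part of IEM): one non-rep STOSB step writes AL to
 * ES:rDI and steps rDI only; unlike MOVS, rSI is not involved and no memory is
 * read. Hypothetical names, flat memory.
 */
#if 0 /* example only */
static void stosbStepExample(uint8_t *pbMem, uint64_t *puDi, uint8_t bAl, uint32_t fEFlags)
{
    pbMem[*puDi] = bAl;
    *puDi += (fEFlags & 0x400 /*DF*/) ? (uint64_t)-1 : 1;
}
#endif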
7737
7738/**
7739 * @opcode 0xaa
7740 */
7741FNIEMOP_DEF(iemOp_stosb_Yb_AL)
7742{
7743 /*
7744 * Use the C implementation if a repeat prefix is encountered.
7745 */
7746 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7747 {
7748 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
7749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7750 switch (pVCpu->iem.s.enmEffAddrMode)
7751 {
7752 case IEMMODE_16BIT:
7753 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7754 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7755 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7756 iemCImpl_stos_al_m16);
7757 case IEMMODE_32BIT:
7758 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7759 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7760 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7761 iemCImpl_stos_al_m32);
7762 case IEMMODE_64BIT:
7763 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7764 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7765 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7766 iemCImpl_stos_al_m64);
7767 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7768 }
7769 }
7770
7771 /*
7772 * Sharing case implementation with stos[wdq] below.
7773 */
7774 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
7775 switch (pVCpu->iem.s.enmEffAddrMode)
7776 {
7777 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7778 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7779 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
7780 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7781 }
7782}
7783
7784
7785/**
7786 * @opcode 0xab
7787 */
7788FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
7789{
7790 /*
7791 * Use the C implementation if a repeat prefix is encountered.
7792 */
7793 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7794 {
7795 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
7796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7797 switch (pVCpu->iem.s.enmEffOpSize)
7798 {
7799 case IEMMODE_16BIT:
7800 switch (pVCpu->iem.s.enmEffAddrMode)
7801 {
7802 case IEMMODE_16BIT:
7803                     IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7804 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7805 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7806 iemCImpl_stos_ax_m16);
7807 case IEMMODE_32BIT:
7808                     IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7809 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7810 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7811 iemCImpl_stos_ax_m32);
7812 case IEMMODE_64BIT:
7813                     IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7814 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7815 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7816 iemCImpl_stos_ax_m64);
7817 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7818 }
7819 break;
7820 case IEMMODE_32BIT:
7821 switch (pVCpu->iem.s.enmEffAddrMode)
7822 {
7823 case IEMMODE_16BIT:
7824                     IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7825 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7826 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7827 iemCImpl_stos_eax_m16);
7828 case IEMMODE_32BIT:
7829                     IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7830 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7831 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7832 iemCImpl_stos_eax_m32);
7833 case IEMMODE_64BIT:
7834                     IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7835 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7836 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7837 iemCImpl_stos_eax_m64);
7838 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7839 }
7840 case IEMMODE_64BIT:
7841 switch (pVCpu->iem.s.enmEffAddrMode)
7842 {
7843 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7844 case IEMMODE_32BIT:
7845                     IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7846 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7847 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7848 iemCImpl_stos_rax_m32);
7849 case IEMMODE_64BIT:
7850                     IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7851 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7852 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7853 iemCImpl_stos_rax_m64);
7854 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7855 }
7856 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7857 }
7858 }
7859
7860 /*
7861 * Annoying double switch here.
7862 * Using ugly macro for implementing the cases, sharing it with stosb.
7863 */
7864 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7865 switch (pVCpu->iem.s.enmEffOpSize)
7866 {
7867 case IEMMODE_16BIT:
7868 switch (pVCpu->iem.s.enmEffAddrMode)
7869 {
7870 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7871 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7872 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
7873 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7874 }
7875 break;
7876
7877 case IEMMODE_32BIT:
7878 switch (pVCpu->iem.s.enmEffAddrMode)
7879 {
7880 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7881 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7882 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
7883 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7884 }
7885 break;
7886
7887 case IEMMODE_64BIT:
7888 switch (pVCpu->iem.s.enmEffAddrMode)
7889 {
7890 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7891 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
7892 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
7893 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7894 }
7895 break;
7896 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7897 }
7898}
7899
7900#undef IEM_STOS_CASE
7901
7902/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv */
7903#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
7904 IEM_MC_BEGIN(a_fMcFlags, 0); \
7905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
7906 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
7907 IEM_MC_LOCAL(RTGCPTR, uAddr); \
7908 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
7909 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
7910 IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
7911 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
7912 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
7913 } IEM_MC_ELSE() { \
7914 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
7915 } IEM_MC_ENDIF(); \
7916 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
7917 IEM_MC_END() \
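/*
 * Illustrative sketch (not part of IEM): one non-rep LODSB step loads AL from
 * DS:rSI and steps rSI only; no flags are written, so the macro above only
 * reads EFLAGS for the DF test. Hypothetical names, flat memory.
 */
#if 0 /* example only */
static uint8_t lodsbStepExample(uint8_t const *pbMem, uint64_t *puSi, uint32_t fEFlags)
{
    uint8_t const bAl = pbMem[*puSi];
    *puSi += (fEFlags & 0x400 /*DF*/) ? (uint64_t)-1 : 1;
    return bAl;
}
#endif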
7918
7919/**
7920 * @opcode 0xac
7921 * @opfltest df
7922 */
7923FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
7924{
7925 /*
7926 * Use the C implementation if a repeat prefix is encountered.
7927 */
7928 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7929 {
7930 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
7931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7932 switch (pVCpu->iem.s.enmEffAddrMode)
7933 {
7934 case IEMMODE_16BIT:
7935 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7936 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7937 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7938 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7939 iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
7940 case IEMMODE_32BIT:
7941 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7942 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7943 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7944 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7945 iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
7946 case IEMMODE_64BIT:
7947 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7948 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7949 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7950 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7951 iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
7952 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7953 }
7954 }
7955
7956 /*
7957      * Sharing case implementation with lods[wdq] below.
7958 */
7959 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
7960 switch (pVCpu->iem.s.enmEffAddrMode)
7961 {
7962 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7963 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7964 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
7965 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7966 }
7967}
7968
7969
7970/**
7971 * @opcode 0xad
7972 * @opfltest df
7973 */
7974FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
7975{
7976 /*
7977 * Use the C implementation if a repeat prefix is encountered.
7978 */
7979 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7980 {
7981 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
7982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7983 switch (pVCpu->iem.s.enmEffOpSize)
7984 {
7985 case IEMMODE_16BIT:
7986 switch (pVCpu->iem.s.enmEffAddrMode)
7987 {
7988 case IEMMODE_16BIT:
7989 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7990 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7991 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7992 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7993 iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
7994 case IEMMODE_32BIT:
7995 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7996 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7997 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7998 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7999 iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
8000 case IEMMODE_64BIT:
8001 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8002 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8003 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8004 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8005 iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
8006 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8007 }
8008 break;
8009 case IEMMODE_32BIT:
8010 switch (pVCpu->iem.s.enmEffAddrMode)
8011 {
8012 case IEMMODE_16BIT:
8013 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8014 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8015 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8016 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8017 iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
8018 case IEMMODE_32BIT:
8019 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8020 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8021 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8022 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8023 iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
8024 case IEMMODE_64BIT:
8025 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8026 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8027 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8028 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8029 iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
8030 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8031 }
8032 case IEMMODE_64BIT:
8033 switch (pVCpu->iem.s.enmEffAddrMode)
8034 {
8035 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
8036 case IEMMODE_32BIT:
8037 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8038 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8039 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8040 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8041 iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
8042 case IEMMODE_64BIT:
8043 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8044 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8045 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8046 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8047 iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
8048 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8049 }
8050 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8051 }
8052 }
8053
8054 /*
8055 * Annoying double switch here.
8056 * Using ugly macro for implementing the cases, sharing it with lodsb.
8057 */
8058 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
8059 switch (pVCpu->iem.s.enmEffOpSize)
8060 {
8061 case IEMMODE_16BIT:
8062 switch (pVCpu->iem.s.enmEffAddrMode)
8063 {
8064 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
8065 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
8066 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
8067 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8068 }
8069 break;
8070
8071 case IEMMODE_32BIT:
8072 switch (pVCpu->iem.s.enmEffAddrMode)
8073 {
8074 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
8075 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
8076 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
8077 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8078 }
8079 break;
8080
8081 case IEMMODE_64BIT:
8082 switch (pVCpu->iem.s.enmEffAddrMode)
8083 {
8084 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
8085 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
8086 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
8087 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8088 }
8089 break;
8090 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8091 }
8092}
8093
8094#undef IEM_LODS_CASE
8095
8096/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv */
8097#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
8098 IEM_MC_BEGIN(a_fMcFlags, 0); \
8099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8100 IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
8101 IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
8102 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8103 IEM_MC_LOCAL(RTGCPTR, uAddr); \
8104 \
8105 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
8106 IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
8107 IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
8108 IEM_MC_REF_EFLAGS(pEFlags); \
8109 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
8110 \
8111 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
8112 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
8113 } IEM_MC_ELSE() { \
8114 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
8115 } IEM_MC_ENDIF(); \
8116 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8117 IEM_MC_END();
8118
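/*
 * Illustrative sketch (not part of IEM): one non-rep SCASB step computes
 * AL - [ES:rDI] for the flags (AL is never written, which is why the macro
 * above only takes a reference to it for iemAImpl_cmp) and steps rDI.
 * Hypothetical names, flat memory, only ZF shown.
 */
#if 0 /* example only */
static void scasbStepExample(uint8_t const *pbMem, uint64_t *puDi, uint8_t bAl, uint32_t *pfEFlags)
{
    *pfEFlags = (*pfEFlags & ~UINT32_C(0x40)) | (bAl == pbMem[*puDi] ? UINT32_C(0x40) /*ZF*/ : 0);
    *puDi += (*pfEFlags & 0x400 /*DF*/) ? (uint64_t)-1 : 1;
}
#endif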
8119/**
8120 * @opcode 0xae
8121 * @opflclass arithmetic
8122 * @opfltest df
8123 */
8124FNIEMOP_DEF(iemOp_scasb_AL_Xb)
8125{
8126 /*
8127 * Use the C implementation if a repeat prefix is encountered.
8128 */
8129 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
8130 {
8131 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
8132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8133 switch (pVCpu->iem.s.enmEffAddrMode)
8134 {
8135 case IEMMODE_16BIT:
8136 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8137 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8138 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8139 iemCImpl_repe_scas_al_m16);
8140 case IEMMODE_32BIT:
8141 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8142 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8143 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8144 iemCImpl_repe_scas_al_m32);
8145 case IEMMODE_64BIT:
8146 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8147 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8148 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8149 iemCImpl_repe_scas_al_m64);
8150 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8151 }
8152 }
8153 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
8154 {
8155         IEMOP_MNEMONIC(repne_scasb_AL_Xb, "repne scasb AL,Xb");
8156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8157 switch (pVCpu->iem.s.enmEffAddrMode)
8158 {
8159 case IEMMODE_16BIT:
8160 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8161 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8162 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8163 iemCImpl_repne_scas_al_m16);
8164 case IEMMODE_32BIT:
8165 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8166 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8167 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8168 iemCImpl_repne_scas_al_m32);
8169 case IEMMODE_64BIT:
8170 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8171 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8172 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8173 iemCImpl_repne_scas_al_m64);
8174 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8175 }
8176 }
8177
8178 /*
8179      * Sharing case implementation with scas[wdq] below.
8180 */
8181 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
8182 switch (pVCpu->iem.s.enmEffAddrMode)
8183 {
8184 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
8185 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
8186 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
8187 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8188 }
8189}
8190
8191
8192/**
8193 * @opcode 0xaf
8194 * @opflclass arithmetic
8195 * @opfltest df
8196 */
8197FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
8198{
8199 /*
8200 * Use the C implementation if a repeat prefix is encountered.
8201 */
8202 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
8203 {
8204 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
8205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8206 switch (pVCpu->iem.s.enmEffOpSize)
8207 {
8208 case IEMMODE_16BIT:
8209 switch (pVCpu->iem.s.enmEffAddrMode)
8210 {
8211 case IEMMODE_16BIT:
8212 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8213 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8214 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8215 iemCImpl_repe_scas_ax_m16);
8216 case IEMMODE_32BIT:
8217 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8218 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8219 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8220 iemCImpl_repe_scas_ax_m32);
8221 case IEMMODE_64BIT:
8222 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8223 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8224 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8225 iemCImpl_repe_scas_ax_m64);
8226 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8227 }
8228 break;
8229 case IEMMODE_32BIT:
8230 switch (pVCpu->iem.s.enmEffAddrMode)
8231 {
8232 case IEMMODE_16BIT:
8233 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8234 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8235 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8236 iemCImpl_repe_scas_eax_m16);
8237 case IEMMODE_32BIT:
8238 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8239 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8240 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8241 iemCImpl_repe_scas_eax_m32);
8242 case IEMMODE_64BIT:
8243 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8244 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8245 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8246 iemCImpl_repe_scas_eax_m64);
8247 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8248 }
8249 case IEMMODE_64BIT:
8250 switch (pVCpu->iem.s.enmEffAddrMode)
8251 {
8252                     case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Is this wrong? In 64-bit mode we can do 32-bit addressing (67h prefix) but not 16-bit, so the assertion looks right. */
8253 case IEMMODE_32BIT:
8254 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8255 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8256 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8257 iemCImpl_repe_scas_rax_m32);
8258 case IEMMODE_64BIT:
8259 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8260 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8261 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8262 iemCImpl_repe_scas_rax_m64);
8263 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8264 }
8265 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8266 }
8267 }
8268 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
8269 {
8270 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
8271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8272 switch (pVCpu->iem.s.enmEffOpSize)
8273 {
8274 case IEMMODE_16BIT:
8275 switch (pVCpu->iem.s.enmEffAddrMode)
8276 {
8277 case IEMMODE_16BIT:
8278 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8279 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8280 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8281 iemCImpl_repne_scas_ax_m16);
8282 case IEMMODE_32BIT:
8283 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8284 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8285 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8286 iemCImpl_repne_scas_ax_m32);
8287 case IEMMODE_64BIT:
8288 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8289 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8290 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8291 iemCImpl_repne_scas_ax_m64);
8292 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8293 }
8294 break;
8295 case IEMMODE_32BIT:
8296 switch (pVCpu->iem.s.enmEffAddrMode)
8297 {
8298 case IEMMODE_16BIT:
8299 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8300 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8301 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8302 iemCImpl_repne_scas_eax_m16);
8303 case IEMMODE_32BIT:
8304 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8305 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8306 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8307 iemCImpl_repne_scas_eax_m32);
8308 case IEMMODE_64BIT:
8309 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8310 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8311 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8312 iemCImpl_repne_scas_eax_m64);
8313 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8314 }
8315 case IEMMODE_64BIT:
8316 switch (pVCpu->iem.s.enmEffAddrMode)
8317 {
8318 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
8319 case IEMMODE_32BIT:
8320 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8321 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8322 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8323 iemCImpl_repne_scas_rax_m32);
8324 case IEMMODE_64BIT:
8325 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8326 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8327 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8328 iemCImpl_repne_scas_rax_m64);
8329 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8330 }
8331 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8332 }
8333 }
8334
8335 /*
8336 * Annoying double switch here.
8337 * Using ugly macro for implementing the cases, sharing it with scasb.
8338 */
8339 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
8340 switch (pVCpu->iem.s.enmEffOpSize)
8341 {
8342 case IEMMODE_16BIT:
8343 switch (pVCpu->iem.s.enmEffAddrMode)
8344 {
8345 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
8346 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
8347 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
8348 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8349 }
8350 break;
8351
8352 case IEMMODE_32BIT:
8353 switch (pVCpu->iem.s.enmEffAddrMode)
8354 {
8355 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
8356 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
8357 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
8358 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8359 }
8360 break;
8361
8362 case IEMMODE_64BIT:
8363 switch (pVCpu->iem.s.enmEffAddrMode)
8364 {
8365 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
8366 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
8367 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
8368 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8369 }
8370 break;
8371 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8372 }
8373}
8374
8375#undef IEM_SCAS_CASE
8376
8377/**
8378 * Common 'mov r8, imm8' helper.
8379 */
8380FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
8381{
8382 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8383 IEM_MC_BEGIN(0, 0);
8384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8385 IEM_MC_STORE_GREG_U8_CONST(iFixedReg, u8Imm);
8386 IEM_MC_ADVANCE_RIP_AND_FINISH();
8387 IEM_MC_END();
8388}
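/* Worked example (illustrative): opcodes 0xb0+r encode the target register
   in the low three bits, with REX.B supplying bit 3 - hence the
   'X86_GREG_xXX | pVCpu->iem.s.uRexB' at the call sites below:
       b3 01           mov bl, 1
       41 b3 01        mov r11b, 1     ; REX.B: register 3 becomes 11
*/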
8389
8390
8391/**
8392 * @opcode 0xb0
8393 */
8394FNIEMOP_DEF(iemOp_mov_AL_Ib)
8395{
8396 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
8397 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8398}
8399
8400
8401/**
8402 * @opcode 0xb1
8403 */
8404FNIEMOP_DEF(iemOp_CL_Ib)
8405{
8406 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
8407 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8408}
8409
8410
8411/**
8412 * @opcode 0xb2
8413 */
8414FNIEMOP_DEF(iemOp_DL_Ib)
8415{
8416 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
8417 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8418}
8419
8420
8421/**
8422 * @opcode 0xb3
8423 */
8424FNIEMOP_DEF(iemOp_BL_Ib)
8425{
8426 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
8427 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8428}
8429
8430
8431/**
8432 * @opcode 0xb4
8433 */
8434FNIEMOP_DEF(iemOp_mov_AH_Ib)
8435{
8436 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
8437 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8438}
8439
8440
8441/**
8442 * @opcode 0xb5
8443 */
8444FNIEMOP_DEF(iemOp_CH_Ib)
8445{
8446 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
8447 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8448}
8449
8450
8451/**
8452 * @opcode 0xb6
8453 */
8454FNIEMOP_DEF(iemOp_DH_Ib)
8455{
8456 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
8457 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8458}
8459
8460
8461/**
8462 * @opcode 0xb7
8463 */
8464FNIEMOP_DEF(iemOp_BH_Ib)
8465{
8466 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
8467 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8468}
8469
8470
8471/**
8472 * Common 'mov regX,immX' helper.
8473 */
8474FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
8475{
8476 switch (pVCpu->iem.s.enmEffOpSize)
8477 {
8478 case IEMMODE_16BIT:
8479 IEM_MC_BEGIN(0, 0);
8480 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8482 IEM_MC_STORE_GREG_U16_CONST(iFixedReg, u16Imm);
8483 IEM_MC_ADVANCE_RIP_AND_FINISH();
8484 IEM_MC_END();
8485 break;
8486
8487 case IEMMODE_32BIT:
8488 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8489 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8491 IEM_MC_STORE_GREG_U32_CONST(iFixedReg, u32Imm);
8492 IEM_MC_ADVANCE_RIP_AND_FINISH();
8493 IEM_MC_END();
8494 break;
8495
8496 case IEMMODE_64BIT:
8497 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
8498 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
8499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8500 IEM_MC_STORE_GREG_U64_CONST(iFixedReg, u64Imm);
8501 IEM_MC_ADVANCE_RIP_AND_FINISH();
8502 IEM_MC_END();
8503 break;
8504 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8505 }
8506}
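/* Worked example (illustrative): with REX.W this is the sole case of a full
   8-byte immediate on x86 ('movabs' in AT&T syntax), in contrast to most
   64-bit instructions which sign-extend a 32-bit immediate:
       b8 78 56 34 12                      mov eax, 0x12345678
       48 b8 ef cd ab 89 67 45 23 01       mov rax, 0x0123456789abcdef
*/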
8507
8508
8509/**
8510 * @opcode 0xb8
8511 */
8512FNIEMOP_DEF(iemOp_eAX_Iv)
8513{
8514 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
8515 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8516}
8517
8518
8519/**
8520 * @opcode 0xb9
8521 */
8522FNIEMOP_DEF(iemOp_eCX_Iv)
8523{
8524 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
8525 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8526}
8527
8528
8529/**
8530 * @opcode 0xba
8531 */
8532FNIEMOP_DEF(iemOp_eDX_Iv)
8533{
8534 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
8535 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8536}
8537
8538
8539/**
8540 * @opcode 0xbb
8541 */
8542FNIEMOP_DEF(iemOp_eBX_Iv)
8543{
8544 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
8545 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8546}
8547
8548
8549/**
8550 * @opcode 0xbc
8551 */
8552FNIEMOP_DEF(iemOp_eSP_Iv)
8553{
8554 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
8555 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8556}
8557
8558
8559/**
8560 * @opcode 0xbd
8561 */
8562FNIEMOP_DEF(iemOp_eBP_Iv)
8563{
8564 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
8565 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8566}
8567
8568
8569/**
8570 * @opcode 0xbe
8571 */
8572FNIEMOP_DEF(iemOp_eSI_Iv)
8573{
8574 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
8575 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8576}
8577
8578
8579/**
8580 * @opcode 0xbf
8581 */
8582FNIEMOP_DEF(iemOp_eDI_Iv)
8583{
8584 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
8585 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8586}
8587
8588
8589/**
8590 * @opcode 0xc0
8591 */
8592FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
8593{
8594 IEMOP_HLP_MIN_186();
8595 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8596
8597 /* Need to use a body macro here since the EFLAGS behaviour differs between
8598 the shifts, rotates and rotate w/ carry. Sigh. */
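    /* Summary (drawn from the verification macros in the cases below): the
       rotates declare only OF as undefined, the shifts declare OF and AF,
       and RCL/RCR additionally consume CF as input; hence a shared body
       macro parameterized by the IEMTARGETCPU_EFL_BEHAVIOR_SELECT'ed helper
       instead of a single common implementation. */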
8599#define GRP2_BODY_Eb_Ib(a_pImplExpr) \
8600 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
8601 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8602 { \
8603 /* register */ \
8604 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8605 IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0); \
8606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8607 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
8608 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8609 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8610 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8611 IEM_MC_REF_EFLAGS(pEFlags); \
8612 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
8613 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8614 IEM_MC_END(); \
8615 } \
8616 else \
8617 { \
8618 /* memory */ \
8619 IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0); \
8620 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8621 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
8622 \
8623 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8625 \
8626 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8627 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
8628 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8629 \
8630 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8631 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
8632 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
8633 \
8634 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8635 IEM_MC_COMMIT_EFLAGS(EFlags); \
8636 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8637 IEM_MC_END(); \
8638 } (void)0
8639
8640 switch (IEM_GET_MODRM_REG_8(bRm))
8641 {
8642 /**
8643 * @opdone
8644 * @opmaps grp2_c0
8645 * @opcode /0
8646 * @opflclass rotate_count
8647 */
8648 case 0:
8649 {
8650 IEMOP_MNEMONIC2(MI, ROL, rol, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8651 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8652 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
8653 break;
8654 }
8655 /**
8656 * @opdone
8657 * @opmaps grp2_c0
8658 * @opcode /1
8659 * @opflclass rotate_count
8660 */
8661 case 1:
8662 {
8663 IEMOP_MNEMONIC2(MI, ROR, ror, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8664 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8665 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
8666 break;
8667 }
8668 /**
8669 * @opdone
8670 * @opmaps grp2_c0
8671 * @opcode /2
8672 * @opflclass rotate_carry_count
8673 */
8674 case 2:
8675 {
8676 IEMOP_MNEMONIC2(MI, RCL, rcl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8677 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8678 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
8679 break;
8680 }
8681 /**
8682 * @opdone
8683 * @opmaps grp2_c0
8684 * @opcode /3
8685 * @opflclass rotate_carry_count
8686 */
8687 case 3:
8688 {
8689 IEMOP_MNEMONIC2(MI, RCR, rcr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8690 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8691 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
8692 break;
8693 }
8694 /**
8695 * @opdone
8696 * @opmaps grp2_c0
8697 * @opcode /4
8698 * @opflclass shift_count
8699 */
8700 case 4:
8701 {
8702 IEMOP_MNEMONIC2(MI, SHL, shl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8703 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8704 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
8705 break;
8706 }
8707 /**
8708 * @opdone
8709 * @opmaps grp2_c0
8710 * @opcode /5
8711 * @opflclass shift_count
8712 */
8713 case 5:
8714 {
8715 IEMOP_MNEMONIC2(MI, SHR, shr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8716 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8717 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
8718 break;
8719 }
8720 /**
8721 * @opdone
8722 * @opmaps grp2_c0
8723 * @opcode /7
8724 * @opflclass shift_count
8725 */
8726 case 7:
8727 {
8728 IEMOP_MNEMONIC2(MI, SAR, sar, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8729 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8730 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
8731 break;
8732 }
8733
8734 /** @opdone */
8735 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8736 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8737 }
8738#undef GRP2_BODY_Eb_Ib
8739}
8740
8741
8742/* Need to use a body macro here since the EFLAGS behaviour differs between
8743 the shifts, rotates and rotate w/ carry. Sigh. */
8744#define GRP2_BODY_Ev_Ib(a_pImplExpr) \
8745 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
8746 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8747 { \
8748 /* register */ \
8749 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8750 switch (pVCpu->iem.s.enmEffOpSize) \
8751 { \
8752 case IEMMODE_16BIT: \
8753 IEM_MC_BEGIN(IEM_MC_F_MIN_186, 0); \
8754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8755 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
8756 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8757 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8758 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8759 IEM_MC_REF_EFLAGS(pEFlags); \
8760 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
8761 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8762 IEM_MC_END(); \
8763 break; \
8764 \
8765 case IEMMODE_32BIT: \
8766 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8768 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
8769 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8770 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8771 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8772 IEM_MC_REF_EFLAGS(pEFlags); \
8773 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
8774 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
8775 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8776 IEM_MC_END(); \
8777 break; \
8778 \
8779 case IEMMODE_64BIT: \
8780 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
8781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8782 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
8783 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8784 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8785 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8786 IEM_MC_REF_EFLAGS(pEFlags); \
8787 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
8788 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8789 IEM_MC_END(); \
8790 break; \
8791 \
8792 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
8793 } \
8794 } \
8795 else \
8796 { \
8797 /* memory */ \
8798 switch (pVCpu->iem.s.enmEffOpSize) \
8799 { \
8800 case IEMMODE_16BIT: \
8801 IEM_MC_BEGIN(0, 0); \
8802 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8803 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
8804 \
8805 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8807 \
8808 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8809 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
8810 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8811 \
8812 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8813 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
8814 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
8815 \
8816 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8817 IEM_MC_COMMIT_EFLAGS(EFlags); \
8818 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8819 IEM_MC_END(); \
8820 break; \
8821 \
8822 case IEMMODE_32BIT: \
8823 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8824 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
8826 \
8827 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8829 \
8830 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8831 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
8832 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8833 \
8834 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8835 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
8836 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
8837 \
8838 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8839 IEM_MC_COMMIT_EFLAGS(EFlags); \
8840 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8841 IEM_MC_END(); \
8842 break; \
8843 \
8844 case IEMMODE_64BIT: \
8845 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
8846 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8847 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
8848 \
8849 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8851 \
8852 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8853 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
8854 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8855 \
8856 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8857 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
8858 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
8859 \
8860 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8861 IEM_MC_COMMIT_EFLAGS(EFlags); \
8862 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8863 IEM_MC_END(); \
8864 break; \
8865 \
8866 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
8867 } \
8868 } (void)0
8869
8870/**
8871 * @opmaps grp2_c1
8872 * @opcode /0
8873 * @opflclass rotate_count
8874 */
8875FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_Ib, uint8_t, bRm)
8876{
8877 IEMOP_MNEMONIC2(MI, ROL, rol, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8878 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
8879}
8880
8881
8882/**
8883 * @opmaps grp2_c1
8884 * @opcode /1
8885 * @opflclass rotate_count
8886 */
8887FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_Ib, uint8_t, bRm)
8888{
8889 IEMOP_MNEMONIC2(MI, ROR, ror, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8890 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
8891}
8892
8893
8894/**
8895 * @opmaps grp2_c1
8896 * @opcode /2
8897 * @opflclass rotate_carry_count
8898 */
8899FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_Ib, uint8_t, bRm)
8900{
8901 IEMOP_MNEMONIC2(MI, RCL, rcl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8902 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
8903}
8904
8905
8906/**
8907 * @opmaps grp2_c1
8908 * @opcode /3
8909 * @opflclass rotate_carry_count
8910 */
8911FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_Ib, uint8_t, bRm)
8912{
8913 IEMOP_MNEMONIC2(MI, RCR, rcr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8914 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
8915}
8916
8917
8918/**
8919 * @opmaps grp2_c1
8920 * @opcode /4
8921 * @opflclass shift_count
8922 */
8923FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_Ib, uint8_t, bRm)
8924{
8925 IEMOP_MNEMONIC2(MI, SHL, shl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8926 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
8927}
8928
8929
8930/**
8931 * @opmaps grp2_c1
8932 * @opcode /5
8933 * @opflclass shift_count
8934 */
8935FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_Ib, uint8_t, bRm)
8936{
8937 IEMOP_MNEMONIC2(MI, SHR, shr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8938 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
8939}
8940
8941
8942/**
8943 * @opmaps grp2_c1
8944 * @opcode /7
8945 * @opflclass shift_count
8946 */
8947FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_Ib, uint8_t, bRm)
8948{
8949 IEMOP_MNEMONIC2(MI, SAR, sar, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8950 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
8951}
8952
8953#undef GRP2_BODY_Ev_Ib
8954
8955/**
8956 * @opcode 0xc1
8957 */
8958FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
8959{
8960 IEMOP_HLP_MIN_186();
8961 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8962
8963 switch (IEM_GET_MODRM_REG_8(bRm))
8964 {
8965 case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_Ib, bRm);
8966 case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_Ib, bRm);
8967 case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_Ib, bRm);
8968 case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_Ib, bRm);
8969 case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_Ib, bRm);
8970 case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_Ib, bRm);
8971 case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_Ib, bRm);
8972 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8973 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8974 }
8975}
8976
8977
8978/**
8979 * @opcode 0xc2
8980 */
8981FNIEMOP_DEF(iemOp_retn_Iw)
8982{
8983 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
8984 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8985 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8986 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8987 switch (pVCpu->iem.s.enmEffOpSize)
8988 {
8989 case IEMMODE_16BIT:
8990 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8991 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_16, u16Imm);
8992 case IEMMODE_32BIT:
8993 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8994 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_32, u16Imm);
8995 case IEMMODE_64BIT:
8996 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8997 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_64, u16Imm);
8998 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8999 }
9000}
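/* Usage note (illustrative): the immediate is added to the stack pointer
   after the return address is popped, releasing callee-cleaned argument
   bytes, e.g. a 32-bit stdcall epilogue:
       c2 08 00        ret 8           ; pop EIP, then ESP += 8
*/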
9001
9002
9003/**
9004 * @opcode 0xc3
9005 */
9006FNIEMOP_DEF(iemOp_retn)
9007{
9008 IEMOP_MNEMONIC(retn, "retn");
9009 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
9010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9011 switch (pVCpu->iem.s.enmEffOpSize)
9012 {
9013 case IEMMODE_16BIT:
9014 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
9015 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_16);
9016 case IEMMODE_32BIT:
9017 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
9018 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_32);
9019 case IEMMODE_64BIT:
9020 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
9021 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_64);
9022 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9023 }
9024}
9025
9026
9027/**
9028 * @opcode 0xc4
9029 */
9030FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
9031{
9032    /* The LES instruction is invalid in 64-bit mode. In legacy and
9033       compatibility mode it is invalid with MOD=3.
9034 The use as a VEX prefix is made possible by assigning the inverted
9035 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
9036 outside of 64-bit mode. VEX is not available in real or v86 mode. */
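    /* Worked example (illustrative) of the 3-byte form decoded below:
           c4 e1 79 6f c1  vmovdqa xmm0, xmm1
       byte 1 (0xe1): ~R=1 ~X=1 ~B=1 (no REX bits), mmmmm=00001 -> 0x0f map;
       byte 2 (0x79): W=0, ~vvvv=1111 -> vvvv unused, L=0 (128-bit), pp=01 (66h).
       The same instruction also fits the shorter 2-byte form c5 f9 6f c1. */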
9037 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9038 if ( IEM_IS_64BIT_CODE(pVCpu)
9039 || IEM_IS_MODRM_REG_MODE(bRm) )
9040 {
9041 IEMOP_MNEMONIC(vex3_prefix, "vex3");
9042 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
9043 {
9044 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
9045 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
9046 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
9047 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
9048 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
9049#if 1
9050 AssertCompile(IEM_OP_PRF_SIZE_REX_W == RT_BIT_32(9));
9051 pVCpu->iem.s.fPrefixes |= (uint32_t)(bVex2 & 0x80) << (9 - 7);
9052#else
9053 if (bVex2 & 0x80 /* VEX.W */)
9054 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
9055#endif
9056 if (IEM_IS_64BIT_CODE(pVCpu))
9057 {
9058#if 1
9059 AssertCompile(IEM_OP_PRF_REX_B == RT_BIT_32(25) && IEM_OP_PRF_REX_X == RT_BIT_32(26) && IEM_OP_PRF_REX_R == RT_BIT_32(27));
9060 pVCpu->iem.s.fPrefixes |= (uint32_t)(~bRm & 0xe0) << (25 - 5);
9061#else
9062 if (~bRm & 0x20 /* VEX.~B */)
9063 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_B;
9064 if (~bRm & 0x40 /* VEX.~X */)
9065 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_X;
9066 if (~bRm & 0x80 /* VEX.~R */)
9067 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_R;
9068#endif
9069 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
9070 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
9071 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
9072 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
9073 }
9074 else
9075 {
9076 pVCpu->iem.s.uRexReg = 0;
9077 pVCpu->iem.s.uRexIndex = 0;
9078 pVCpu->iem.s.uRexB = 0;
9079                /** @todo testcase: Will attempts to access registers 8 thru 15 from 16&32 bit
9080 * code raise \#UD or just be ignored? We're ignoring for now... */
9081 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0x7;
9082 }
9083 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
9084 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
9085
9086 switch (bRm & 0x1f)
9087 {
9088 case 1: /* 0x0f lead opcode byte. */
9089#ifdef IEM_WITH_VEX
9090 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
9091#else
9092 IEMOP_BITCH_ABOUT_STUB();
9093 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
9094#endif
9095
9096 case 2: /* 0x0f 0x38 lead opcode bytes. */
9097#ifdef IEM_WITH_VEX
9098 return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
9099#else
9100 IEMOP_BITCH_ABOUT_STUB();
9101 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
9102#endif
9103
9104 case 3: /* 0x0f 0x3a lead opcode bytes. */
9105#ifdef IEM_WITH_VEX
9106 return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
9107#else
9108 IEMOP_BITCH_ABOUT_STUB();
9109 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
9110#endif
9111
9112 default:
9113 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
9114 IEMOP_RAISE_INVALID_OPCODE_RET();
9115 }
9116 }
9117 Log(("VEX3: VEX support disabled!\n"));
9118 IEMOP_RAISE_INVALID_OPCODE_RET();
9119 }
9120
9121 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
9122 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
9123}
9124
9125
9126/**
9127 * @opcode 0xc5
9128 */
9129FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
9130{
9131    /* The LDS instruction is invalid in 64-bit mode. In legacy and
9132       compatibility mode it is invalid with MOD=3.
9133 The use as a VEX prefix is made possible by assigning the inverted
9134 REX.R to the top MOD bit, and the top bit in the inverted register
9135 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
9136 to accessing registers 0..7 in this VEX form. */
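    /* Worked example (illustrative) of the 2-byte form decoded below:
           c5 f9 6f c1     vmovdqa xmm0, xmm1
       byte 1 (0xf9): ~R=1 (REX.R=0), ~vvvv=1111 -> vvvv unused, L=0
       (128-bit), pp=01 (66h); the leading opcode map is implicitly 0x0f. */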
9137 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9138 if ( IEM_IS_64BIT_CODE(pVCpu)
9139 || IEM_IS_MODRM_REG_MODE(bRm))
9140 {
9141 IEMOP_MNEMONIC(vex2_prefix, "vex2");
9142 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
9143 {
9144 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
9145 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
9146 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
9147 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
9148 AssertCompile(IEM_OP_PRF_REX_R == RT_BIT_32(27));
9149 pVCpu->iem.s.fPrefixes |= (uint32_t)(~bRm & 0x80) << (27 - 7);
9150 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
9151 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
9152 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
9153 pVCpu->iem.s.idxPrefix = bRm & 0x3;
9154
9155#ifdef IEM_WITH_VEX
9156 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
9157#else
9158 IEMOP_BITCH_ABOUT_STUB();
9159 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
9160#endif
9161 }
9162
9163 /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
9164 Log(("VEX2: VEX support disabled!\n"));
9165 IEMOP_RAISE_INVALID_OPCODE_RET();
9166 }
9167
9168 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
9169 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
9170}
9171
9172
9173/**
9174 * @opcode 0xc6
9175 */
9176FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
9177{
9178 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9179 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
9180 IEMOP_RAISE_INVALID_OPCODE_RET();
9181 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
9182
9183 if (IEM_IS_MODRM_REG_MODE(bRm))
9184 {
9185 /* register access */
9186 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9187 IEM_MC_BEGIN(0, 0);
9188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9189 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
9190 IEM_MC_ADVANCE_RIP_AND_FINISH();
9191 IEM_MC_END();
9192 }
9193 else
9194 {
9195 /* memory access. */
9196 IEM_MC_BEGIN(0, 0);
9197 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9198 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9199 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9201 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
9202 IEM_MC_ADVANCE_RIP_AND_FINISH();
9203 IEM_MC_END();
9204 }
9205}
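/* Worked example (illustrative) of the memory form in 64-bit code:
       c6 00 7f        mov byte [rax], 0x7f    ; modrm=00 => /0, rm=[rax]
   Any other /reg value in this group raises #UD, as checked above. */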
9206
9207
9208/**
9209 * @opcode 0xc7
9210 */
9211FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
9212{
9213 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9214    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
9215 IEMOP_RAISE_INVALID_OPCODE_RET();
9216 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
9217
9218 if (IEM_IS_MODRM_REG_MODE(bRm))
9219 {
9220 /* register access */
9221 switch (pVCpu->iem.s.enmEffOpSize)
9222 {
9223 case IEMMODE_16BIT:
9224 IEM_MC_BEGIN(0, 0);
9225 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9227 IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
9228 IEM_MC_ADVANCE_RIP_AND_FINISH();
9229 IEM_MC_END();
9230 break;
9231
9232 case IEMMODE_32BIT:
9233 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
9234 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9236 IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
9237 IEM_MC_ADVANCE_RIP_AND_FINISH();
9238 IEM_MC_END();
9239 break;
9240
9241 case IEMMODE_64BIT:
9242 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
9243 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9245 IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
9246 IEM_MC_ADVANCE_RIP_AND_FINISH();
9247 IEM_MC_END();
9248 break;
9249
9250 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9251 }
9252 }
9253 else
9254 {
9255 /* memory access. */
9256 switch (pVCpu->iem.s.enmEffOpSize)
9257 {
9258 case IEMMODE_16BIT:
9259 IEM_MC_BEGIN(0, 0);
9260 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9261 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
9262 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9264 IEM_MC_STORE_MEM_U16_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
9265 IEM_MC_ADVANCE_RIP_AND_FINISH();
9266 IEM_MC_END();
9267 break;
9268
9269 case IEMMODE_32BIT:
9270 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
9271 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9272 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9273 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9274 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9275 IEM_MC_STORE_MEM_U32_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
9276 IEM_MC_ADVANCE_RIP_AND_FINISH();
9277 IEM_MC_END();
9278 break;
9279
9280 case IEMMODE_64BIT:
9281 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
9282 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9283 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9284 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9286 IEM_MC_STORE_MEM_U64_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
9287 IEM_MC_ADVANCE_RIP_AND_FINISH();
9288 IEM_MC_END();
9289 break;
9290
9291 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9292 }
9293 }
9294}
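/* Usage note (illustrative): unlike 0xb8+r, the 64-bit form only carries a
   sign-extended 32-bit immediate (Iz) - hence the S32_SX_U64 fetches above:
       48 c7 c0 ff ff ff ff    mov rax, 0xffffffffffffffff
*/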
9295
9296
9297
9298
9299/**
9300 * @opcode 0xc8
9301 */
9302FNIEMOP_DEF(iemOp_enter_Iw_Ib)
9303{
9304 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
9305 IEMOP_HLP_MIN_186();
9306 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9307 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
9308 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
9309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9310 IEM_MC_DEFER_TO_CIMPL_3_RET(0,
9311 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9312 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
9313 iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
9314}
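/* Usage note (illustrative): with nesting level 0 this is equivalent to
   push xBP; mov xBP,xSP; sub xSP,Iw, e.g.:
       c8 20 00 00     enter 0x20, 0   ; set up a 32-byte stack frame
   Non-zero levels additionally copy outer frame pointers, which is part of
   why the whole instruction is deferred to the C implementation. */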
9315
9316
9317/**
9318 * @opcode 0xc9
9319 */
9320FNIEMOP_DEF(iemOp_leave)
9321{
9322 IEMOP_MNEMONIC(leave, "leave");
9323 IEMOP_HLP_MIN_186();
9324 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9326 IEM_MC_DEFER_TO_CIMPL_1_RET(0,
9327 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9328 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
9329 iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
9330}
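/* Usage note (illustrative): the matching epilogue, equivalent to
   mov xSP,xBP; pop xBP:
       c9              leave
*/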
9331
9332
9333/**
9334 * @opcode 0xca
9335 */
9336FNIEMOP_DEF(iemOp_retf_Iw)
9337{
9338 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
9339 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9341 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
9342 | IEM_CIMPL_F_MODE,
9343 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9344 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
9345 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
9346 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
9347 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9348 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
9349 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
9350 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
9351 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9352 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
9353 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
9354 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
9355 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9356 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
9357 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
9358 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
9359 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9360 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
9361}
9362
9363
9364/**
9365 * @opcode 0xcb
9366 */
9367FNIEMOP_DEF(iemOp_retf)
9368{
9369 IEMOP_MNEMONIC(retf, "retf");
9370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9371 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
9372 | IEM_CIMPL_F_MODE,
9373 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9374 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
9375 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
9376 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
9377 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9378 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
9379 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
9380 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
9381 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9382 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
9383 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
9384 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
9385 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9386 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
9387 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
9388 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
9389 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9390 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
9391}
9392
9393
9394/**
9395 * @opcode 0xcc
9396 */
9397FNIEMOP_DEF(iemOp_int3)
9398{
9399 IEMOP_MNEMONIC(int3, "int3");
9400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9401 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9402 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
9403 iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
9404}
9405
9406
9407/**
9408 * @opcode 0xcd
9409 */
9410FNIEMOP_DEF(iemOp_int_Ib)
9411{
9412 IEMOP_MNEMONIC(int_Ib, "int Ib");
9413 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
9414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9415 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9416 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS, UINT64_MAX,
9417 iemCImpl_int, u8Int, IEMINT_INTN);
9418 /** @todo make task-switches, ring-switches, ++ return non-zero status */
9419}
9420
9421
9422/**
9423 * @opcode 0xce
9424 */
9425FNIEMOP_DEF(iemOp_into)
9426{
9427 IEMOP_MNEMONIC(into, "into");
9428 IEMOP_HLP_NO_64BIT();
9429 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9430 | IEM_CIMPL_F_BRANCH_CONDITIONAL | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
9431 UINT64_MAX,
9432 iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
9433 /** @todo make task-switches, ring-switches, ++ return non-zero status */
9434}
9435
9436
9437/**
9438 * @opcode 0xcf
9439 */
9440FNIEMOP_DEF(iemOp_iret)
9441{
9442 IEMOP_MNEMONIC(iret, "iret");
9443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9444 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9445 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
9446 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9447 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
9448 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
9449 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
9450 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
9451 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
9452 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
9453 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
9454 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
9455 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
9456 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
9457 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
9458 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
9459 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9460 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9461 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9462 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9463 iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
9464 /* Segment registers are sanitized when returning to an outer ring, or fully
9465 reloaded when returning to v86 mode. Thus the large flush list above. */
9466}
9467
9468
9469/**
9470 * @opcode 0xd0
9471 */
9472FNIEMOP_DEF(iemOp_Grp2_Eb_1)
9473{
9474 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9475
9476 /* Need to use a body macro here since the EFLAGS behaviour differs between
9477 the shifts, rotates and rotate w/ carry. Sigh. */
9478#define GRP2_BODY_Eb_1(a_pImplExpr) \
9479 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
9480 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9481 { \
9482 /* register */ \
9483 IEM_MC_BEGIN(0, 0); \
9484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9485 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
9486 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1); \
9487 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9488 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9489 IEM_MC_REF_EFLAGS(pEFlags); \
9490 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
9491 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9492 IEM_MC_END(); \
9493 } \
9494 else \
9495 { \
9496 /* memory */ \
9497 IEM_MC_BEGIN(0, 0); \
9498 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
9499 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1); \
9500 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9501 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9502 \
9503 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9505 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9506 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9507 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
9508 \
9509 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9510 IEM_MC_COMMIT_EFLAGS(EFlags); \
9511 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9512 IEM_MC_END(); \
9513 } (void)0
9514
9515 switch (IEM_GET_MODRM_REG_8(bRm))
9516 {
9517 /**
9518 * @opdone
9519 * @opmaps grp2_d0
9520 * @opcode /0
9521 * @opflclass rotate_1
9522 */
9523 case 0:
9524 {
9525 IEMOP_MNEMONIC2(M1, ROL, rol, Eb, 1, DISOPTYPE_HARMLESS, 0);
9526 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9527 break;
9528 }
9529 /**
9530 * @opdone
9531 * @opmaps grp2_d0
9532 * @opcode /1
9533 * @opflclass rotate_1
9534 */
9535 case 1:
9536 {
9537 IEMOP_MNEMONIC2(M1, ROR, ror, Eb, 1, DISOPTYPE_HARMLESS, 0);
9538 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9539 break;
9540 }
9541 /**
9542 * @opdone
9543 * @opmaps grp2_d0
9544 * @opcode /2
9545 * @opflclass rotate_carry_1
9546 */
9547 case 2:
9548 {
9549 IEMOP_MNEMONIC2(M1, RCL, rcl, Eb, 1, DISOPTYPE_HARMLESS, 0);
9550 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9551 break;
9552 }
9553 /**
9554 * @opdone
9555 * @opmaps grp2_d0
9556 * @opcode /3
9557 * @opflclass rotate_carry_1
9558 */
9559 case 3:
9560 {
9561 IEMOP_MNEMONIC2(M1, RCR, rcr, Eb, 1, DISOPTYPE_HARMLESS, 0);
9562 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9563 break;
9564 }
9565 /**
9566 * @opdone
9567 * @opmaps grp2_d0
9568 * @opcode /4
9569 * @opflclass shift_1
9570 */
9571 case 4:
9572 {
9573 IEMOP_MNEMONIC2(M1, SHL, shl, Eb, 1, DISOPTYPE_HARMLESS, 0);
9574 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9575 break;
9576 }
9577 /**
9578 * @opdone
9579 * @opmaps grp2_d0
9580 * @opcode /5
9581 * @opflclass shift_1
9582 */
9583 case 5:
9584 {
9585 IEMOP_MNEMONIC2(M1, SHR, shr, Eb, 1, DISOPTYPE_HARMLESS, 0);
9586 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9587 break;
9588 }
9589 /**
9590 * @opdone
9591 * @opmaps grp2_d0
9592 * @opcode /7
9593 * @opflclass shift_1
9594 */
9595 case 7:
9596 {
9597 IEMOP_MNEMONIC2(M1, SAR, sar, Eb, 1, DISOPTYPE_HARMLESS, 0);
9598 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
9599 break;
9600 }
9601 /** @opdone */
9602 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9603 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
9604 }
9605#undef GRP2_BODY_Eb_1
9606}
9607
9608
9609/* Need to use a body macro here since the EFLAGS behaviour differs between
9610 the shifts, rotates and rotate w/ carry. Sigh. */
9611#define GRP2_BODY_Ev_1(a_pImplExpr) \
9612 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
9613 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9614 { \
9615 /* register */ \
9616 switch (pVCpu->iem.s.enmEffOpSize) \
9617 { \
9618 case IEMMODE_16BIT: \
9619 IEM_MC_BEGIN(0, 0); \
9620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9621 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9622 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
9623 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9624 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9625 IEM_MC_REF_EFLAGS(pEFlags); \
9626 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
9627 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9628 IEM_MC_END(); \
9629 break; \
9630 \
9631 case IEMMODE_32BIT: \
9632 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9634 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9635 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
9636 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9637 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9638 IEM_MC_REF_EFLAGS(pEFlags); \
9639 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
9640 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9641 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9642 IEM_MC_END(); \
9643 break; \
9644 \
9645 case IEMMODE_64BIT: \
9646 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9648 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9649 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
9650 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9651 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9652 IEM_MC_REF_EFLAGS(pEFlags); \
9653 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
9654 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9655 IEM_MC_END(); \
9656 break; \
9657 \
9658 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9659 } \
9660 } \
9661 else \
9662 { \
9663 /* memory */ \
9664 switch (pVCpu->iem.s.enmEffOpSize) \
9665 { \
9666 case IEMMODE_16BIT: \
9667 IEM_MC_BEGIN(0, 0); \
9668 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9669 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
9670 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9671 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9672 \
9673 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9675 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9676 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9677 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
9678 \
9679 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9680 IEM_MC_COMMIT_EFLAGS(EFlags); \
9681 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9682 IEM_MC_END(); \
9683 break; \
9684 \
9685 case IEMMODE_32BIT: \
9686 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9687 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9688 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
9689 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9690 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9691 \
9692 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9694 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9695 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9696 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
9697 \
9698 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9699 IEM_MC_COMMIT_EFLAGS(EFlags); \
9700 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9701 IEM_MC_END(); \
9702 break; \
9703 \
9704 case IEMMODE_64BIT: \
9705 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9706 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9707 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
9708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9709 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9710 \
9711 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9713 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9714 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9715 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
9716 \
9717 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9718 IEM_MC_COMMIT_EFLAGS(EFlags); \
9719 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9720 IEM_MC_END(); \
9721 break; \
9722 \
9723 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9724 } \
9725 } (void)0
9726
9727/**
9728 * @opmaps grp2_d1
9729 * @opcode /0
9730 * @opflclass rotate_1
9731 */
9732FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_1, uint8_t, bRm)
9733{
9734 IEMOP_MNEMONIC2(M1, ROL, rol, Ev, 1, DISOPTYPE_HARMLESS, 0);
9735 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9736}
9737
9738
9739/**
9740 * @opmaps grp2_d1
9741 * @opcode /1
9742 * @opflclass rotate_1
9743 */
9744FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_1, uint8_t, bRm)
9745{
9746 IEMOP_MNEMONIC2(M1, ROR, ror, Ev, 1, DISOPTYPE_HARMLESS, 0);
9747 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9748}
9749
9750
9751/**
9752 * @opmaps grp2_d1
9753 * @opcode /2
9754 * @opflclass rotate_carry_1
9755 */
9756FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_1, uint8_t, bRm)
9757{
9758 IEMOP_MNEMONIC2(M1, RCL, rcl, Ev, 1, DISOPTYPE_HARMLESS, 0);
9759 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9760}
9761
9762
9763/**
9764 * @opmaps grp2_d1
9765 * @opcode /3
9766 * @opflclass rotate_carry_1
9767 */
9768FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_1, uint8_t, bRm)
9769{
9770 IEMOP_MNEMONIC2(M1, RCR, rcr, Ev, 1, DISOPTYPE_HARMLESS, 0);
9771 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9772}
9773
9774
9775/**
9776 * @opmaps grp2_d1
9777 * @opcode /4
9778 * @opflclass shift_1
9779 */
9780FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_1, uint8_t, bRm)
9781{
9782 IEMOP_MNEMONIC2(M1, SHL, shl, Ev, 1, DISOPTYPE_HARMLESS, 0);
9783 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9784}
9785
9786
9787/**
9788 * @opmaps grp2_d1
9789 * @opcode /5
9790 * @opflclass shift_1
9791 */
9792FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_1, uint8_t, bRm)
9793{
9794 IEMOP_MNEMONIC2(M1, SHR, shr, Ev, 1, DISOPTYPE_HARMLESS, 0);
9795 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9796}
9797
9798
9799/**
9800 * @opmaps grp2_d1
9801 * @opcode /7
9802 * @opflclass shift_1
9803 */
9804FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_1, uint8_t, bRm)
9805{
9806 IEMOP_MNEMONIC2(M1, SAR, sar, Ev, 1, DISOPTYPE_HARMLESS, 0);
9807 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
9808}
9809
9810#undef GRP2_BODY_Ev_1
9811
9812/**
9813 * @opcode 0xd1
9814 */
9815FNIEMOP_DEF(iemOp_Grp2_Ev_1)
9816{
9817 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9818 switch (IEM_GET_MODRM_REG_8(bRm))
9819 {
9820 case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_1, bRm);
9821 case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_1, bRm);
9822 case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_1, bRm);
9823 case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_1, bRm);
9824 case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_1, bRm);
9825 case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_1, bRm);
9826 case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_1, bRm);
9827 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9828 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
9829 }
9830}
9831
9832
9833/**
9834 * @opcode 0xd2
9835 */
9836FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
9837{
9838 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9839
9840 /* Need to use a body macro here since the EFLAGS behaviour differs between
9841 the shifts, rotates and rotate w/ carry. Sigh. */
9842#define GRP2_BODY_Eb_CL(a_pImplExpr) \
9843 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
9844 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9845 { \
9846 /* register */ \
9847 IEM_MC_BEGIN(0, 0); \
9848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9849 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
9850 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9851 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9852 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9853 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9854 IEM_MC_REF_EFLAGS(pEFlags); \
9855 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
9856 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9857 IEM_MC_END(); \
9858 } \
9859 else \
9860 { \
9861 /* memory */ \
9862 IEM_MC_BEGIN(0, 0); \
9863 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
9864 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9865 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9866 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9867 \
9868 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9870 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9871 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9872 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9873 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
9874 \
9875 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9876 IEM_MC_COMMIT_EFLAGS(EFlags); \
9877 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9878 IEM_MC_END(); \
9879 } (void)0
9880
9881 switch (IEM_GET_MODRM_REG_8(bRm))
9882 {
9883 /**
9884 * @opdone
9885 * @opmaps grp2_d0
9886 * @opcode /0
9887 * @opflclass rotate_count
9888 */
9889 case 0:
9890 {
9891 IEMOP_MNEMONIC2EX(rol_Eb_CL, "rol Eb,CL", M_CL, ROL, rol, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9892 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9893 break;
9894 }
9895 /**
9896 * @opdone
9897 * @opmaps grp2_d0
9898 * @opcode /1
9899 * @opflclass rotate_count
9900 */
9901 case 1:
9902 {
9903 IEMOP_MNEMONIC2EX(ror_Eb_CL, "ror Eb,CL", M_CL, ROR, ror, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9904 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9905 break;
9906 }
9907 /**
9908 * @opdone
9909 * @opmaps grp2_d0
9910 * @opcode /2
9911 * @opflclass rotate_carry_count
9912 */
9913 case 2:
9914 {
9915 IEMOP_MNEMONIC2EX(rcl_Eb_CL, "rcl Eb,CL", M_CL, RCL, rcl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9916 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9917 break;
9918 }
9919 /**
9920 * @opdone
9921 * @opmaps grp2_d0
9922 * @opcode /3
9923 * @opflclass rotate_carry_count
9924 */
9925 case 3:
9926 {
9927 IEMOP_MNEMONIC2EX(rcr_Eb_CL, "rcr Eb,CL", M_CL, RCR, rcr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9928 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9929 break;
9930 }
9931 /**
9932 * @opdone
9933 * @opmaps grp2_d0
9934 * @opcode /4
9935 * @opflclass shift_count
9936 */
9937 case 4:
9938 {
9939 IEMOP_MNEMONIC2EX(shl_Eb_CL, "shl Eb,CL", M_CL, SHL, shl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9940 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9941 break;
9942 }
9943 /**
9944 * @opdone
9945 * @opmaps grp2_d0
9946 * @opcode /5
9947 * @opflclass shift_count
9948 */
9949 case 5:
9950 {
9951 IEMOP_MNEMONIC2EX(shr_Eb_CL, "shr Eb,CL", M_CL, SHR, shr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9952 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9953 break;
9954 }
9955 /**
9956 * @opdone
9957 * @opmaps grp2_d0
9958 * @opcode /7
9959 * @opflclass shift_count
9960 */
9961 case 7:
9962 {
9963 IEMOP_MNEMONIC2EX(sar_Eb_CL, "sar Eb,CL", M_CL, SAR, sar, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9964 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
9965 break;
9966 }
9967 /** @opdone */
9968 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9969 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
9970 }
9971#undef GRP2_BODY_Eb_CL
9972}
9973
9974
9975/* Need to use a body macro here since the EFLAGS behaviour differs between
9976 the shifts, rotates and rotate w/ carry. Sigh. */
9977#define GRP2_BODY_Ev_CL(a_Ins, a_pImplExpr, a_fRegNativeArchs, a_fMemNativeArchs) \
9978 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
9979 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9980 { \
9981 /* register */ \
9982 switch (pVCpu->iem.s.enmEffOpSize) \
9983 { \
9984 case IEMMODE_16BIT: \
9985 IEM_MC_BEGIN(0, 0); \
9986 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9987 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9988 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9989 /*IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
9990 IEM_MC_LOCAL(uint16_t, u16Dst); \
9991 IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9992 } IEM_MC_NATIVE_ELSE() { */ \
9993 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9994 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9995 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9996 IEM_MC_REF_EFLAGS(pEFlags); \
9997 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
9998 /*}*/ \
9999 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10000 IEM_MC_END(); \
10001 break; \
10002 \
10003 case IEMMODE_32BIT: \
10004 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10006 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10007 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
10008 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10009 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
10010 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10011 IEM_MC_REF_EFLAGS(pEFlags); \
10012 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
10013 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
10014 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10015 IEM_MC_END(); \
10016 break; \
10017 \
10018 case IEMMODE_64BIT: \
10019 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10021 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10022 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
10023 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10024 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
10025 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10026 IEM_MC_REF_EFLAGS(pEFlags); \
10027 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
10028 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10029 IEM_MC_END(); \
10030 break; \
10031 \
10032 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10033 } \
10034 } \
10035 else \
10036 { \
10037 /* memory */ \
10038 switch (pVCpu->iem.s.enmEffOpSize) \
10039 { \
10040 case IEMMODE_16BIT: \
10041 IEM_MC_BEGIN(0, 0); \
10042 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10043 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
10044 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10045 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10046 \
10047 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10048 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10049 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
10050 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10051 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
10052 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
10053 \
10054 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10055 IEM_MC_COMMIT_EFLAGS(EFlags); \
10056 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10057 IEM_MC_END(); \
10058 break; \
10059 \
10060 case IEMMODE_32BIT: \
10061 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10062 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10063 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
10064 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10065 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10066 \
10067 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10069 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
10070 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10071 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
10072 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
10073 \
10074 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10075 IEM_MC_COMMIT_EFLAGS(EFlags); \
10076 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10077 IEM_MC_END(); \
10078 break; \
10079 \
10080 case IEMMODE_64BIT: \
10081 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10082 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10083 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
10084 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10085 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10086 \
10087 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10089 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
10090 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10091 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
10092 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
10093 \
10094 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10095 IEM_MC_COMMIT_EFLAGS(EFlags); \
10096 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10097 IEM_MC_END(); \
10098 break; \
10099 \
10100 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10101 } \
10102 } (void)0
10103
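/* Illustrative sketch (not part of the build): the reason the EFLAGS handling
   cannot be shared between the instruction classes is that plain shifts
   recompute SF/ZF/PF from the result, rotates only produce CF (plus OF for a
   count of one), and RCL/RCR additionally rotate through CF. Roughly, for a
   plain-C 8-bit ROL (helper name invented here for the example): */
#if 0
static void RefRol8(uint8_t *puDst, uint8_t cShift, uint32_t *pfEFlags)
{
    cShift &= 0x1f;                         /* the CPU masks the count to 5 bits */
    if (cShift)
    {
        unsigned const cRot = cShift & 7;
        uint8_t  const uRes = (uint8_t)((*puDst << cRot) | (*puDst >> ((8 - cRot) & 7)));
        uint32_t       fEfl = *pfEFlags & ~(uint32_t)(X86_EFL_CF | X86_EFL_OF);
        fEfl |= uRes & X86_EFL_CF;          /* CF = the bit rotated into position 0 */
        if (cShift == 1)                    /* OF = MSB ^ CF; undefined for other counts */
            fEfl |= ((uint32_t)((uRes >> 7) ^ uRes) & 1) << X86_EFL_OF_BIT;
        *puDst    = uRes;
        *pfEFlags = fEfl;
    }
}
#endif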
10104
10105/**
10106 * @opmaps grp2_d3
10107 * @opcode /0
10108 * @opflclass rotate_count
10109 */
10110FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_CL, uint8_t, bRm)
10111{
10112 IEMOP_MNEMONIC2EX(rol_Ev_CL, "rol Ev,CL", M_CL, ROL, rol, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10113 GRP2_BODY_Ev_CL(rol, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags), 0, 0);
10114}
10115
10116
10117/**
10118 * @opmaps grp2_d3
10119 * @opcode /1
10120 * @opflclass rotate_count
10121 */
10122FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_CL, uint8_t, bRm)
10123{
10124 IEMOP_MNEMONIC2EX(ror_Ev_CL, "ror Ev,CL", M_CL, ROR, ror, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10125 GRP2_BODY_Ev_CL(ror, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags), 0, 0);
10126}
10127
10128
10129/**
10130 * @opmaps grp2_d3
10131 * @opcode /2
10132 * @opflclass rotate_carry_count
10133 */
10134FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_CL, uint8_t, bRm)
10135{
10136 IEMOP_MNEMONIC2EX(rcl_Ev_CL, "rcl Ev,CL", M_CL, RCL, rcl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10137 GRP2_BODY_Ev_CL(rcl, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags), 0, 0);
10138}
10139
10140
10141/**
10142 * @opmaps grp2_d3
10143 * @opcode /3
10144 * @opflclass rotate_carry_count
10145 */
10146FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_CL, uint8_t, bRm)
10147{
10148 IEMOP_MNEMONIC2EX(rcr_Ev_CL, "rcr Ev,CL", M_CL, RCR, rcr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10149 GRP2_BODY_Ev_CL(rcr, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags), 0, 0);
10150}
10151
10152
10153/**
10154 * @opmaps grp2_d3
10155 * @opcode /4
10156 * @opflclass shift_count
10157 */
10158FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_CL, uint8_t, bRm)
10159{
10160 IEMOP_MNEMONIC2EX(shl_Ev_CL, "shl Ev,CL", M_CL, SHL, shl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10161 GRP2_BODY_Ev_CL(shl, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags), 0, 0);
10162}
10163
10164
10165/**
10166 * @opmaps grp2_d3
10167 * @opcode /5
10168 * @opflclass shift_count
10169 */
10170FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_CL, uint8_t, bRm)
10171{
10172 IEMOP_MNEMONIC2EX(shr_Ev_CL, "shr Ev,CL", M_CL, SHR, shr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10173 GRP2_BODY_Ev_CL(shr, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags), 0, 0);
10174}
10175
10176
10177/**
10178 * @opmaps grp2_d3
10179 * @opcode /7
10180 * @opflclass shift_count
10181 */
10182FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_CL, uint8_t, bRm)
10183{
10184 IEMOP_MNEMONIC2EX(sar_Ev_CL, "sar Ev,CL", M_CL, SAR, sar, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10185 GRP2_BODY_Ev_CL(sar, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags), 0, 0);
10186}
10187
10188#undef GRP2_BODY_Ev_CL
10189
10190/**
10191 * @opcode 0xd3
10192 */
10193FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
10194{
10195 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10196 switch (IEM_GET_MODRM_REG_8(bRm))
10197 {
10198 case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_CL, bRm);
10199 case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_CL, bRm);
10200 case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_CL, bRm);
10201 case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_CL, bRm);
10202 case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_CL, bRm);
10203 case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_CL, bRm);
10204 case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_CL, bRm);
10205 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10206 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
10207 }
10208}
10209
10210
10211/**
10212 * @opcode 0xd4
10213 * @opflmodify cf,pf,af,zf,sf,of
10214 * @opflundef cf,af,of
10215 */
10216FNIEMOP_DEF(iemOp_aam_Ib)
10217{
10218/** @todo testcase: aam */
10219 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
10220 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
10221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10222 IEMOP_HLP_NO_64BIT();
10223 if (!bImm)
10224 IEMOP_RAISE_DIVIDE_ERROR_RET();
10225 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aam, bImm);
10226}
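/* A minimal sketch of the semantics deferred to iemCImpl_aam above, assuming
   the usual definition (the immediate is the divisor; 0x0a in the canonical
   AAM encoding):
       AH = AL / bImm;  AL = AL % bImm;
   with SF/ZF/PF set from the new AL and CF/OF/AF left undefined. The
   bImm == 0 check above is what produces the \#DE. */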
10227
10228
10229/**
10230 * @opcode 0xd5
10231 * @opflmodify cf,pf,af,zf,sf,of
10232 * @opflundef cf,af,of
10233 */
10234FNIEMOP_DEF(iemOp_aad_Ib)
10235{
10236/** @todo testcase: aad? */
10237 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
10238 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
10239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10240 IEMOP_HLP_NO_64BIT();
10241 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aad, bImm);
10242}
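/* Likewise a sketch for AAD (deferred to iemCImpl_aad):
       AL = (AL + AH * bImm) & 0xff;  AH = 0;
   with SF/ZF/PF set from the new AL. There is no division here, hence no
   \#DE path. */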
10243
10244
10245/**
10246 * @opcode 0xd6
10247 */
10248FNIEMOP_DEF(iemOp_salc)
10249{
10250 IEMOP_MNEMONIC(salc, "salc");
10251 IEMOP_HLP_NO_64BIT();
10252
10253 IEM_MC_BEGIN(0, 0);
10254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10255 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
10256 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
10257 } IEM_MC_ELSE() {
10258 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
10259 } IEM_MC_ENDIF();
10260 IEM_MC_ADVANCE_RIP_AND_FINISH();
10261 IEM_MC_END();
10262}
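/* SALC ('set AL from carry') behaves like the documented SBB AL,AL as far as
   AL is concerned: AL = CF ? 0xff : 0x00, matching the two branches above,
   but unlike SBB it leaves EFLAGS untouched. (Illustration only.) */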
10263
10264
10265/**
10266 * @opcode 0xd7
10267 */
10268FNIEMOP_DEF(iemOp_xlat)
10269{
10270 IEMOP_MNEMONIC(xlat, "xlat");
10271 switch (pVCpu->iem.s.enmEffAddrMode)
10272 {
10273 case IEMMODE_16BIT:
10274 IEM_MC_BEGIN(0, 0);
10275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10276 IEM_MC_LOCAL(uint8_t, u8Tmp);
10277 IEM_MC_LOCAL(uint16_t, u16Addr);
10278 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
10279 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
10280 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
10281 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
10282 IEM_MC_ADVANCE_RIP_AND_FINISH();
10283 IEM_MC_END();
10284 break;
10285
10286 case IEMMODE_32BIT:
10287 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10289 IEM_MC_LOCAL(uint8_t, u8Tmp);
10290 IEM_MC_LOCAL(uint32_t, u32Addr);
10291 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
10292 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
10293 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
10294 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
10295 IEM_MC_ADVANCE_RIP_AND_FINISH();
10296 IEM_MC_END();
10297 break;
10298
10299 case IEMMODE_64BIT:
10300 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10302 IEM_MC_LOCAL(uint8_t, u8Tmp);
10303 IEM_MC_LOCAL(uint64_t, u64Addr);
10304 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
10305 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
10306 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
10307 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
10308 IEM_MC_ADVANCE_RIP_AND_FINISH();
10309 IEM_MC_END();
10310 break;
10311
10312 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10313 }
10314}
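/* Conceptually all three branches above compute the same thing and only
   differ in the address arithmetic width (sketch):
       AL = Mem8[iEffSeg : (rBX + ZeroExt(AL))], truncated to the effective
   address size. */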
10315
10316
10317/**
10318 * Common worker for FPU instructions working on ST0 and STn, and storing the
10319 * result in ST0.
10320 *
10321 * @param bRm Mod R/M byte.
10322 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10323 */
10324FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
10325{
10326 IEM_MC_BEGIN(0, 0);
10327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10328 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10329 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10330 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10331 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10332
10333 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10334 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10335 IEM_MC_PREPARE_FPU_USAGE();
10336 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
10337 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
10338 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10339 } IEM_MC_ELSE() {
10340 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10341 } IEM_MC_ENDIF();
10342 IEM_MC_ADVANCE_RIP_AND_FINISH();
10343
10344 IEM_MC_END();
10345}
10346
10347
10348/**
10349 * Common worker for FPU instructions working on ST0 and STn, and only affecting
10350 * flags.
10351 *
10352 * @param bRm Mod R/M byte.
10353 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10354 */
10355FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
10356{
10357 IEM_MC_BEGIN(0, 0);
10358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10359 IEM_MC_LOCAL(uint16_t, u16Fsw);
10360 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10361 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10362 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10363
10364 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10365 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10366 IEM_MC_PREPARE_FPU_USAGE();
10367 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
10368 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
10369 IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
10370 } IEM_MC_ELSE() {
10371 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
10372 } IEM_MC_ENDIF();
10373 IEM_MC_ADVANCE_RIP_AND_FINISH();
10374
10375 IEM_MC_END();
10376}
10377
10378
10379/**
10380 * Common worker for FPU instructions working on ST0 and STn, only affecting
10381 * flags, and popping when done.
10382 *
10383 * @param bRm Mod R/M byte.
10384 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10385 */
10386FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
10387{
10388 IEM_MC_BEGIN(0, 0);
10389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10390 IEM_MC_LOCAL(uint16_t, u16Fsw);
10391 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10392 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10393 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10394
10395 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10396 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10397 IEM_MC_PREPARE_FPU_USAGE();
10398 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
10399 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
10400 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
10401 } IEM_MC_ELSE() {
10402 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
10403 } IEM_MC_ENDIF();
10404 IEM_MC_ADVANCE_RIP_AND_FINISH();
10405
10406 IEM_MC_END();
10407}
10408
10409
10410/** Opcode 0xd8 11/0. */
10411FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
10412{
10413 IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
10414 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
10415}
10416
10417
10418/** Opcode 0xd8 11/1. */
10419FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
10420{
10421 IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
10422 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
10423}
10424
10425
10426/** Opcode 0xd8 11/2. */
10427FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
10428{
10429 IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
10430 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
10431}
10432
10433
10434/** Opcode 0xd8 11/3. */
10435FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
10436{
10437 IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
10438 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
10439}
10440
10441
10442/** Opcode 0xd8 11/4. */
10443FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
10444{
10445 IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
10446 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
10447}
10448
10449
10450/** Opcode 0xd8 11/5. */
10451FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
10452{
10453 IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
10454 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
10455}
10456
10457
10458/** Opcode 0xd8 11/6. */
10459FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
10460{
10461 IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
10462 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
10463}
10464
10465
10466/** Opcode 0xd8 11/7. */
10467FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
10468{
10469 IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
10470 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
10471}
10472
10473
10474/**
10475 * Common worker for FPU instructions working on ST0 and an m32r, and storing
10476 * the result in ST0.
10477 *
10478 * @param bRm Mod R/M byte.
10479 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10480 */
10481FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
10482{
10483 IEM_MC_BEGIN(0, 0);
10484 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10485 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10486 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
10487 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10488 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10489 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
10490
10491 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10493
10494 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10495 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10496 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10497
10498 IEM_MC_PREPARE_FPU_USAGE();
10499 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10500 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
10501 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10502 } IEM_MC_ELSE() {
10503 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10504 } IEM_MC_ENDIF();
10505 IEM_MC_ADVANCE_RIP_AND_FINISH();
10506
10507 IEM_MC_END();
10508}
10509
10510
10511/** Opcode 0xd8 !11/0. */
10512FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
10513{
10514 IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
10515 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
10516}
10517
10518
10519/** Opcode 0xd8 !11/1. */
10520FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
10521{
10522 IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
10523 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
10524}
10525
10526
10527/** Opcode 0xd8 !11/2. */
10528FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
10529{
10530 IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");
10531
10532 IEM_MC_BEGIN(0, 0);
10533 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10534 IEM_MC_LOCAL(uint16_t, u16Fsw);
10535 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
10536 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10537 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10538 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
10539
10540 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10542
10543 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10544 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10545 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10546
10547 IEM_MC_PREPARE_FPU_USAGE();
10548 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10549 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
10550 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10551 } IEM_MC_ELSE() {
10552 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10553 } IEM_MC_ENDIF();
10554 IEM_MC_ADVANCE_RIP_AND_FINISH();
10555
10556 IEM_MC_END();
10557}
10558
10559
10560/** Opcode 0xd8 !11/3. */
10561FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
10562{
10563 IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");
10564
10565 IEM_MC_BEGIN(0, 0);
10566 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10567 IEM_MC_LOCAL(uint16_t, u16Fsw);
10568 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
10569 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10570 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10571 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
10572
10573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10575
10576 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10577 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10578 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10579
10580 IEM_MC_PREPARE_FPU_USAGE();
10581 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10582 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
10583 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10584 } IEM_MC_ELSE() {
10585 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10586 } IEM_MC_ENDIF();
10587 IEM_MC_ADVANCE_RIP_AND_FINISH();
10588
10589 IEM_MC_END();
10590}
10591
10592
10593/** Opcode 0xd8 !11/4. */
10594FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
10595{
10596 IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
10597 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
10598}
10599
10600
10601/** Opcode 0xd8 !11/5. */
10602FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
10603{
10604 IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
10605 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
10606}
10607
10608
10609/** Opcode 0xd8 !11/6. */
10610FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
10611{
10612 IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
10613 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
10614}
10615
10616
10617/** Opcode 0xd8 !11/7. */
10618FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
10619{
10620 IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
10621 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
10622}
10623
10624
10625/**
10626 * @opcode 0xd8
10627 */
10628FNIEMOP_DEF(iemOp_EscF0)
10629{
10630 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10631 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
10632
10633 if (IEM_IS_MODRM_REG_MODE(bRm))
10634 {
10635 switch (IEM_GET_MODRM_REG_8(bRm))
10636 {
10637 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
10638 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
10639 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
10640 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
10641 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
10642 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
10643 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
10644 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
10645 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10646 }
10647 }
10648 else
10649 {
10650 switch (IEM_GET_MODRM_REG_8(bRm))
10651 {
10652 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
10653 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
10654 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
10655 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
10656 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
10657 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
10658 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
10659 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
10660 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10661 }
10662 }
10663}
10664
10665
10666/** Opcode 0xd9 /0 mem32real
10667 * @sa iemOp_fld_m64r */
10668FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
10669{
10670 IEMOP_MNEMONIC(fld_m32r, "fld m32r");
10671
10672 IEM_MC_BEGIN(0, 0);
10673 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10674 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10675 IEM_MC_LOCAL(RTFLOAT32U, r32Val);
10676 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10677 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);
10678
10679 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10681
10682 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10683 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10684 IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10685 IEM_MC_PREPARE_FPU_USAGE();
10686 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
10687 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
10688 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10689 } IEM_MC_ELSE() {
10690 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10691 } IEM_MC_ENDIF();
10692 IEM_MC_ADVANCE_RIP_AND_FINISH();
10693
10694 IEM_MC_END();
10695}
10696
10697
10698/** Opcode 0xd9 !11/2 mem32real */
10699FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
10700{
10701 IEMOP_MNEMONIC(fst_m32r, "fst m32r");
10702 IEM_MC_BEGIN(0, 0);
10703 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10704 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10705
10706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10707 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10708 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10709 IEM_MC_PREPARE_FPU_USAGE();
10710
10711 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
10712 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
10713 IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10714
10715 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
10716 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
10717 IEM_MC_LOCAL(uint16_t, u16Fsw);
10718 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
10719 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
10720 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
10721 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
10722 } IEM_MC_ELSE() {
10723 IEM_MC_IF_FCW_IM() {
10724 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
10725 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
10726 } IEM_MC_ELSE() {
10727 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
10728 } IEM_MC_ENDIF();
10729 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
10730 } IEM_MC_ENDIF();
10731 IEM_MC_ADVANCE_RIP_AND_FINISH();
10732
10733 IEM_MC_END();
10734}
10735
10736
10737/** Opcode 0xd9 !11/3 */
10738FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
10739{
10740 IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
10741 IEM_MC_BEGIN(0, 0);
10742 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10743 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10744
10745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10746 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10747 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10748 IEM_MC_PREPARE_FPU_USAGE();
10749
10750 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
10751 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
10752 IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10753
10754 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
10755 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
10756 IEM_MC_LOCAL(uint16_t, u16Fsw);
10757 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
10758 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
10759 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
10760 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
10761 } IEM_MC_ELSE() {
10762 IEM_MC_IF_FCW_IM() {
10763 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
10764 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
10765 } IEM_MC_ELSE() {
10766 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
10767 } IEM_MC_ENDIF();
10768 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
10769 } IEM_MC_ENDIF();
10770 IEM_MC_ADVANCE_RIP_AND_FINISH();
10771
10772 IEM_MC_END();
10773}
10774
10775
10776/** Opcode 0xd9 !11/4 */
10777FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
10778{
10779 IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
10780 IEM_MC_BEGIN(0, 0);
10781 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
10782 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10783
10784 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10785 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10786 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10787
10788 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
10789 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
10790 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw),
10791 iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
10792 IEM_MC_END();
10793}
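/* The environment image is 14 bytes with a 16-bit operand size and 28 bytes
   with a 32/64-bit one (FCW, FSW, FTW, FPU IP/CS, FPU DP/DS, FOP), which is
   why the effective operand size is passed down to the C implementation. */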
10794
10795
10796/** Opcode 0xd9 !11/5 */
10797FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
10798{
10799 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
10800 IEM_MC_BEGIN(0, 0);
10801 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10802 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10803
10804 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10805 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10806 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10807
10808 IEM_MC_ARG(uint16_t, u16Fcw, 0);
10809 IEM_MC_FETCH_MEM_U16(u16Fcw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10810
10811 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw),
10812 iemCImpl_fldcw, u16Fcw);
10813 IEM_MC_END();
10814}
10815
10816
10817/** Opcode 0xd9 !11/6 */
10818FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
10819{
10820 IEMOP_MNEMONIC(fnstenv, "fnstenv m14/m28byte");
10821 IEM_MC_BEGIN(0, 0);
10822 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
10823 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10824
10825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10826 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10827 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
10828
10829 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
10830 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
10831 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
10832 iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
10833 IEM_MC_END();
10834}
10835
10836
10837/** Opcode 0xd9 !11/7 */
10838FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
10839{
10840 IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
10841 IEM_MC_BEGIN(0, 0);
10842 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10843 IEM_MC_LOCAL(uint16_t, u16Fcw);
10844 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10846 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10847 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
10848 IEM_MC_FETCH_FCW(u16Fcw);
10849 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
10850 IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined; we leave them unmodified. */
10851 IEM_MC_END();
10852}
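/* Note: there is no separate decoder entry for FSTCW/FSTENV; those assembler
   mnemonics are simply a WAIT (0x9b) prefix followed by FNSTCW/FNSTENV, so
   they arrive here after the WAIT has been handled on its own. */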
10853
10854
10855/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?. */
10856FNIEMOP_DEF(iemOp_fnop)
10857{
10858 IEMOP_MNEMONIC(fnop, "fnop");
10859 IEM_MC_BEGIN(0, 0);
10860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10861 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10862 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10863 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10864 /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could
10865 * be an Intel optimization. Investigate. */
10866 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
10867 IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined; we leave them unmodified. */
10868 IEM_MC_END();
10869}
10870
10871
10872/** Opcode 0xd9 11/0 stN */
10873FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
10874{
10875 IEMOP_MNEMONIC(fld_stN, "fld stN");
10876 /** @todo Testcase: Check whether this raises \#MF. Intel's docs suggest it
10877 * does not, while AMD indicates that it does. */
10878 IEM_MC_BEGIN(0, 0);
10879 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10880 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
10881 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10882 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10883 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10884
10885 IEM_MC_PREPARE_FPU_USAGE();
10886 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
10887 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
10888 IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
10889 } IEM_MC_ELSE() {
10890 IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
10891 } IEM_MC_ENDIF();
10892
10893 IEM_MC_ADVANCE_RIP_AND_FINISH();
10894 IEM_MC_END();
10895}
10896
10897
10898/** Opcode 0xd9 11/1 stN */
10899FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
10900{
10901 IEMOP_MNEMONIC(fxch_stN, "fxch stN");
10902 /** @todo Testcase: Check whether this raises \#MF. Intel's docs suggest it
10903 * does not, while AMD indicates that it does. */
10904 IEM_MC_BEGIN(0, 0);
10905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10906 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
10907 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
10908 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10909 IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
10910 IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
10911 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10912 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10913
10914 IEM_MC_PREPARE_FPU_USAGE();
10915 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
10916 IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
10917 IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
10918 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10919 } IEM_MC_ELSE() {
10920 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
10921 } IEM_MC_ENDIF();
10922
10923 IEM_MC_ADVANCE_RIP_AND_FINISH();
10924 IEM_MC_END();
10925}
10926
10927
10928/** Opcode 0xd9 11/3, 0xdd 11/3. */
10929FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
10930{
10931 IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
10932
10933 /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
10934 uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
10935 if (!iDstReg)
10936 {
10937 IEM_MC_BEGIN(0, 0);
10938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10939 IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
10940 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10941 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10942
10943 IEM_MC_PREPARE_FPU_USAGE();
10944 IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
10945 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
10946 } IEM_MC_ELSE() {
10947 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
10948 } IEM_MC_ENDIF();
10949
10950 IEM_MC_ADVANCE_RIP_AND_FINISH();
10951 IEM_MC_END();
10952 }
10953 else
10954 {
10955 IEM_MC_BEGIN(0, 0);
10956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10957 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
10958 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10959 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10960 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10961
10962 IEM_MC_PREPARE_FPU_USAGE();
10963 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
10964 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
10965 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
10966 } IEM_MC_ELSE() {
10967 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
10968 } IEM_MC_ENDIF();
10969
10970 IEM_MC_ADVANCE_RIP_AND_FINISH();
10971 IEM_MC_END();
10972 }
10973}
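/* Usage note: the iDstReg == 0 path above pops without copying, which is why
   'fstp st0' (or the undocumented 'ffreep st0') is the common idiom for
   discarding the top of the x87 stack; the else path is the genuine
   copy-to-st(i)-then-pop. */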
10974
10975
10976/**
10977 * Common worker for FPU instructions working on ST0 and replaces it with the
10978 * result, i.e. unary operators.
10979 *
10980 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10981 */
10982FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
10983{
10984 IEM_MC_BEGIN(0, 0);
10985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10986 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10987 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10988 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
10989
10990 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10991 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10992 IEM_MC_PREPARE_FPU_USAGE();
10993 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
10994 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
10995 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10996 } IEM_MC_ELSE() {
10997 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10998 } IEM_MC_ENDIF();
10999 IEM_MC_ADVANCE_RIP_AND_FINISH();
11000
11001 IEM_MC_END();
11002}
11003
11004
11005/** Opcode 0xd9 0xe0. */
11006FNIEMOP_DEF(iemOp_fchs)
11007{
11008 IEMOP_MNEMONIC(fchs_st0, "fchs st0");
11009 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
11010}
11011
11012
11013/** Opcode 0xd9 0xe1. */
11014FNIEMOP_DEF(iemOp_fabs)
11015{
11016 IEMOP_MNEMONIC(fabs_st0, "fabs st0");
11017 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
11018}
11019
11020
11021/** Opcode 0xd9 0xe4. */
11022FNIEMOP_DEF(iemOp_ftst)
11023{
11024 IEMOP_MNEMONIC(ftst_st0, "ftst st0");
11025 IEM_MC_BEGIN(0, 0);
11026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11027 IEM_MC_LOCAL(uint16_t, u16Fsw);
11028 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11029 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
11030
11031 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11032 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11033 IEM_MC_PREPARE_FPU_USAGE();
11034 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11035 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
11036 IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
11037 } IEM_MC_ELSE() {
11038 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
11039 } IEM_MC_ENDIF();
11040 IEM_MC_ADVANCE_RIP_AND_FINISH();
11041
11042 IEM_MC_END();
11043}
11044
11045
11046/** Opcode 0xd9 0xe5. */
11047FNIEMOP_DEF(iemOp_fxam)
11048{
11049 IEMOP_MNEMONIC(fxam_st0, "fxam st0");
11050 IEM_MC_BEGIN(0, 0);
11051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11052 IEM_MC_LOCAL(uint16_t, u16Fsw);
11053 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11054 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
11055
11056 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11057 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11058 IEM_MC_PREPARE_FPU_USAGE();
11059 IEM_MC_REF_FPUREG(pr80Value, 0);
11060 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
11061 IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
11062 IEM_MC_ADVANCE_RIP_AND_FINISH();
11063
11064 IEM_MC_END();
11065}
11066
11067
11068/**
11069 * Common worker for FPU instructions pushing a constant onto the FPU stack.
11070 *
11071 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11072 */
11073FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
11074{
11075 IEM_MC_BEGIN(0, 0);
11076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11077 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11078 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11079
11080 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11081 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11082 IEM_MC_PREPARE_FPU_USAGE();
11083 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
11084 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
11085 IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
11086 } IEM_MC_ELSE() {
11087 IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
11088 } IEM_MC_ENDIF();
11089 IEM_MC_ADVANCE_RIP_AND_FINISH();
11090
11091 IEM_MC_END();
11092}
11093
11094
11095/** Opcode 0xd9 0xe8. */
11096FNIEMOP_DEF(iemOp_fld1)
11097{
11098 IEMOP_MNEMONIC(fld1, "fld1");
11099 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
11100}
11101
11102
11103/** Opcode 0xd9 0xe9. */
11104FNIEMOP_DEF(iemOp_fldl2t)
11105{
11106 IEMOP_MNEMONIC(fldl2t, "fldl2t");
11107 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
11108}
11109
11110
11111/** Opcode 0xd9 0xea. */
11112FNIEMOP_DEF(iemOp_fldl2e)
11113{
11114 IEMOP_MNEMONIC(fldl2e, "fldl2e");
11115 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
11116}
11117
11118/** Opcode 0xd9 0xeb. */
11119FNIEMOP_DEF(iemOp_fldpi)
11120{
11121 IEMOP_MNEMONIC(fldpi, "fldpi");
11122 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
11123}
11124
11125
11126/** Opcode 0xd9 0xec. */
11127FNIEMOP_DEF(iemOp_fldlg2)
11128{
11129 IEMOP_MNEMONIC(fldlg2, "fldlg2");
11130 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
11131}
11132
11133/** Opcode 0xd9 0xed. */
11134FNIEMOP_DEF(iemOp_fldln2)
11135{
11136 IEMOP_MNEMONIC(fldln2, "fldln2");
11137 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
11138}
11139
11140
11141/** Opcode 0xd9 0xee. */
11142FNIEMOP_DEF(iemOp_fldz)
11143{
11144 IEMOP_MNEMONIC(fldz, "fldz");
11145 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
11146}
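/* For reference, the constants pushed by the group above (rounded per FCW.RC
   where it matters): fld1 = +1.0, fldl2t = log2(10), fldl2e = log2(e),
   fldpi = pi, fldlg2 = log10(2), fldln2 = ln(2), fldz = +0.0. */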
11147
11148
11149/** Opcode 0xd9 0xf0.
11150 *
11151 * The f2xm1 instruction works on values in the range -1.0 thru +1.0 currently
11152 * (the range on the 287 & 8087 was 0.0 thru +0.5 according to docs). In
11153 * addition it does appear to produce proper results for +Inf and -Inf.
11154 *
11155 * This is probably useful in the implementation of pow() and similar.
11156 */
11157FNIEMOP_DEF(iemOp_f2xm1)
11158{
11159 IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
11160 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
11161}
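/* Sketch of the pow() construction hinted at above: x^y = 2^(y * log2(x)).
   fyl2x (0xd9 0xf1 below) computes y * log2(x); split that into an integer
   part i and a fraction f (|f| <= 1, i.e. inside f2xm1's range), evaluate 2^f
   as f2xm1(f) + 1.0, then scale by 2^i with fscale (0xd9 0xfd). */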
11162
11163
11164/**
11165 * Common worker for FPU instructions working on STn and ST0, storing the result
11166 * in STn, and popping the stack unless IE, DE or ZE was raised.
11167 *
11168 * @param bRm Mod R/M byte.
11169 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11170 */
11171FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
11172{
11173 IEM_MC_BEGIN(0, 0);
11174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11175 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11176 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11177 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11178 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
11179
11180 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11181 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11182
11183 IEM_MC_PREPARE_FPU_USAGE();
11184 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
11185 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
11186 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
11187 } IEM_MC_ELSE() {
11188 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
11189 } IEM_MC_ENDIF();
11190 IEM_MC_ADVANCE_RIP_AND_FINISH();
11191
11192 IEM_MC_END();
11193}
11194
11195
11196/** Opcode 0xd9 0xf1. */
11197FNIEMOP_DEF(iemOp_fyl2x)
11198{
11199 IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
11200 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
11201}
11202
11203
11204/**
11205 * Common worker for FPU instructions working on ST0 and having two outputs, one
11206 * replacing ST0 and one pushed onto the stack.
11207 *
11208 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11209 */
11210FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
11211{
11212 IEM_MC_BEGIN(0, 0);
11213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11214 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
11215 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
11216 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
11217
11218 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11219 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11220 IEM_MC_PREPARE_FPU_USAGE();
11221 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11222 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
11223 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
11224 } IEM_MC_ELSE() {
11225 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
11226 } IEM_MC_ENDIF();
11227 IEM_MC_ADVANCE_RIP_AND_FINISH();
11228
11229 IEM_MC_END();
11230}
11231
11232
11233/** Opcode 0xd9 0xf2. */
11234FNIEMOP_DEF(iemOp_fptan)
11235{
11236 IEMOP_MNEMONIC(fptan_st0, "fptan st0");
11237 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
11238}
11239
11240
11241/** Opcode 0xd9 0xf3. */
11242FNIEMOP_DEF(iemOp_fpatan)
11243{
11244 IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
11245 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
11246}
11247
11248
11249/** Opcode 0xd9 0xf4. */
11250FNIEMOP_DEF(iemOp_fxtract)
11251{
11252 IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
11253 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
11254}
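/* Example: with ST0 = 6.0 = 1.5 * 2^2, fxtract replaces ST0 with the exponent
   and pushes the significand, leaving ST0 = 1.5 and ST1 = 2.0 (the push comes
   from the worker above). */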
11255
11256
11257/** Opcode 0xd9 0xf5. */
11258FNIEMOP_DEF(iemOp_fprem1)
11259{
11260 IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
11261 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
11262}
11263
11264
11265/** Opcode 0xd9 0xf6. */
11266FNIEMOP_DEF(iemOp_fdecstp)
11267{
11268 IEMOP_MNEMONIC(fdecstp, "fdecstp");
11269 /* Note! C0, C2 and C3 are documented as undefined; we clear them. */
11270 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
11271 * FINCSTP and FDECSTP. */
11272 IEM_MC_BEGIN(0, 0);
11273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11274
11275 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11276 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11277
11278 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
11279 IEM_MC_FPU_STACK_DEC_TOP();
11280 IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);
11281
11282 IEM_MC_ADVANCE_RIP_AND_FINISH();
11283 IEM_MC_END();
11284}
11285
11286
11287/** Opcode 0xd9 0xf7. */
11288FNIEMOP_DEF(iemOp_fincstp)
11289{
11290 IEMOP_MNEMONIC(fincstp, "fincstp");
11291 /* Note! C0, C2 and C3 are documented as undefined; we clear them. */
11292 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
11293 * FINCSTP and FDECSTP. */
11294 IEM_MC_BEGIN(0, 0);
11295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11296
11297 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11298 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11299
11300 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
11301 IEM_MC_FPU_STACK_INC_TOP();
11302 IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);
11303
11304 IEM_MC_ADVANCE_RIP_AND_FINISH();
11305 IEM_MC_END();
11306}
11307
11308
11309/** Opcode 0xd9 0xf8. */
11310FNIEMOP_DEF(iemOp_fprem)
11311{
11312 IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
11313 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
11314}
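/* Note: fprem truncates the quotient (chop) whereas fprem1 (0xd9 0xf5 above)
   rounds it to nearest, matching the IEEE 754 remainder; both set C2 in FSW
   when only a partial reduction was performed and further iterations are
   needed. */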
11315
11316
11317/** Opcode 0xd9 0xf9. */
11318FNIEMOP_DEF(iemOp_fyl2xp1)
11319{
11320 IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
11321 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
11322}
11323
11324
11325/** Opcode 0xd9 0xfa. */
11326FNIEMOP_DEF(iemOp_fsqrt)
11327{
11328 IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
11329 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
11330}
11331
11332
11333/** Opcode 0xd9 0xfb. */
11334FNIEMOP_DEF(iemOp_fsincos)
11335{
11336 IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
11337 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
11338}
11339
11340
11341/** Opcode 0xd9 0xfc. */
11342FNIEMOP_DEF(iemOp_frndint)
11343{
11344 IEMOP_MNEMONIC(frndint_st0, "frndint st0");
11345 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
11346}
11347
11348
11349/** Opcode 0xd9 0xfd. */
11350FNIEMOP_DEF(iemOp_fscale)
11351{
11352 IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
11353 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
11354}
11355
11356
11357/** Opcode 0xd9 0xfe. */
11358FNIEMOP_DEF(iemOp_fsin)
11359{
11360 IEMOP_MNEMONIC(fsin_st0, "fsin st0");
11361 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
11362}
11363
11364
11365/** Opcode 0xd9 0xff. */
11366FNIEMOP_DEF(iemOp_fcos)
11367{
11368 IEMOP_MNEMONIC(fcos_st0, "fcos st0");
11369 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
11370}
11371
11372
11373/** Used by iemOp_EscF1. */
11374IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
11375{
11376 /* 0xe0 */ iemOp_fchs,
11377 /* 0xe1 */ iemOp_fabs,
11378 /* 0xe2 */ iemOp_Invalid,
11379 /* 0xe3 */ iemOp_Invalid,
11380 /* 0xe4 */ iemOp_ftst,
11381 /* 0xe5 */ iemOp_fxam,
11382 /* 0xe6 */ iemOp_Invalid,
11383 /* 0xe7 */ iemOp_Invalid,
11384 /* 0xe8 */ iemOp_fld1,
11385 /* 0xe9 */ iemOp_fldl2t,
11386 /* 0xea */ iemOp_fldl2e,
11387 /* 0xeb */ iemOp_fldpi,
11388 /* 0xec */ iemOp_fldlg2,
11389 /* 0xed */ iemOp_fldln2,
11390 /* 0xee */ iemOp_fldz,
11391 /* 0xef */ iemOp_Invalid,
11392 /* 0xf0 */ iemOp_f2xm1,
11393 /* 0xf1 */ iemOp_fyl2x,
11394 /* 0xf2 */ iemOp_fptan,
11395 /* 0xf3 */ iemOp_fpatan,
11396 /* 0xf4 */ iemOp_fxtract,
11397 /* 0xf5 */ iemOp_fprem1,
11398 /* 0xf6 */ iemOp_fdecstp,
11399 /* 0xf7 */ iemOp_fincstp,
11400 /* 0xf8 */ iemOp_fprem,
11401 /* 0xf9 */ iemOp_fyl2xp1,
11402 /* 0xfa */ iemOp_fsqrt,
11403 /* 0xfb */ iemOp_fsincos,
11404 /* 0xfc */ iemOp_frndint,
11405 /* 0xfd */ iemOp_fscale,
11406 /* 0xfe */ iemOp_fsin,
11407 /* 0xff */ iemOp_fcos
11408};
11409
11410
11411/**
11412 * @opcode 0xd9
11413 */
11414FNIEMOP_DEF(iemOp_EscF1)
11415{
11416 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11417 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
11418
11419 if (IEM_IS_MODRM_REG_MODE(bRm))
11420 {
11421 switch (IEM_GET_MODRM_REG_8(bRm))
11422 {
11423 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
11424 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
11425 case 2:
11426 if (bRm == 0xd0)
11427 return FNIEMOP_CALL(iemOp_fnop);
11428 IEMOP_RAISE_INVALID_OPCODE_RET();
11429 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
11430 case 4:
11431 case 5:
11432 case 6:
11433 case 7:
11434 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
11435 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
11436 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11437 }
11438 }
11439 else
11440 {
11441 switch (IEM_GET_MODRM_REG_8(bRm))
11442 {
11443 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
11444 case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
11445 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
11446 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
11447 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
11448 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
11449 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
11450 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
11451 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11452 }
11453 }
11454}
11455
11456
11457/** Opcode 0xda 11/0. */
11458FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
11459{
11460 IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
11461 IEM_MC_BEGIN(0, 0);
11462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11463 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
11464
11465 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11466 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11467
11468 IEM_MC_PREPARE_FPU_USAGE();
11469 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
11470 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
11471 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
11472 } IEM_MC_ENDIF();
11473 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
11474 } IEM_MC_ELSE() {
11475 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11476 } IEM_MC_ENDIF();
11477 IEM_MC_ADVANCE_RIP_AND_FINISH();
11478
11479 IEM_MC_END();
11480}
11481
11482
11483/** Opcode 0xda 11/1. */
11484FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
11485{
11486 IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
11487 IEM_MC_BEGIN(0, 0);
11488 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11489 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
11490
11491 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11492 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11493
11494 IEM_MC_PREPARE_FPU_USAGE();
11495 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
11496 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
11497 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
11498 } IEM_MC_ENDIF();
11499 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
11500 } IEM_MC_ELSE() {
11501 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11502 } IEM_MC_ENDIF();
11503 IEM_MC_ADVANCE_RIP_AND_FINISH();
11504
11505 IEM_MC_END();
11506}
11507
11508
11509/** Opcode 0xda 11/2. */
11510FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
11511{
11512 IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
11513 IEM_MC_BEGIN(0, 0);
11514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11515 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
11516
11517 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11518 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11519
11520 IEM_MC_PREPARE_FPU_USAGE();
11521 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
11522 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
11523 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
11524 } IEM_MC_ENDIF();
11525 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
11526 } IEM_MC_ELSE() {
11527 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11528 } IEM_MC_ENDIF();
11529 IEM_MC_ADVANCE_RIP_AND_FINISH();
11530
11531 IEM_MC_END();
11532}
11533
11534
11535/** Opcode 0xda 11/3. */
11536FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
11537{
11538 IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
11539 IEM_MC_BEGIN(0, 0);
11540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11541 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
11542
11543 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11544 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11545
11546 IEM_MC_PREPARE_FPU_USAGE();
11547 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
11548 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
11549 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
11550 } IEM_MC_ENDIF();
11551 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
11552 } IEM_MC_ELSE() {
11553 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11554 } IEM_MC_ENDIF();
11555 IEM_MC_ADVANCE_RIP_AND_FINISH();
11556
11557 IEM_MC_END();
11558}
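/* The 0xda fcmov family tests the same EFLAGS combinations as the
   unsigned/unordered integer conditions produced by fcomi & friends:
   fcmovb <=> CF, fcmove <=> ZF, fcmovbe <=> CF | ZF, fcmovu <=> PF
   (unordered). The 0xdb escape carries the negated forms. */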
11559
11560
11561/**
11562 * Common worker for FPU instructions working on ST0 and ST1, only affecting
11563 * flags, and popping twice when done.
11564 *
11565 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11566 */
11567FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
11568{
11569 IEM_MC_BEGIN(0, 0);
11570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11571 IEM_MC_LOCAL(uint16_t, u16Fsw);
11572 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11573 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11574 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
11575
11576 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11577 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11578
11579 IEM_MC_PREPARE_FPU_USAGE();
11580 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
11581 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
11582 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
11583 } IEM_MC_ELSE() {
11584 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
11585 } IEM_MC_ENDIF();
11586 IEM_MC_ADVANCE_RIP_AND_FINISH();
11587
11588 IEM_MC_END();
11589}
11590
11591
11592/** Opcode 0xda 0xe9. */
11593FNIEMOP_DEF(iemOp_fucompp)
11594{
11595 IEMOP_MNEMONIC(fucompp, "fucompp");
11596 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
11597}
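/* Note: the 'u' in fucompp means the compare is unordered-quiet: QNaN
   operands yield C3/C2/C0 = 111 (unordered) without flagging an invalid
   operation (only SNaNs do), whereas fcom/fcompp would signal it. The double
   pop comes from the worker above. */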
11598
11599
11600/**
11601 * Common worker for FPU instructions working on ST0 and an m32i, and storing
11602 * the result in ST0.
11603 *
11604 * @param bRm Mod R/M byte.
11605 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11606 */
11607FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
11608{
11609 IEM_MC_BEGIN(0, 0);
11610 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11611 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11612 IEM_MC_LOCAL(int32_t, i32Val2);
11613 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11614 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11615 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
11616
11617 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11619
11620 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11621 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11622 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11623
11624 IEM_MC_PREPARE_FPU_USAGE();
11625 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11626 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
11627 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
11628 } IEM_MC_ELSE() {
11629 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11630 } IEM_MC_ENDIF();
11631 IEM_MC_ADVANCE_RIP_AND_FINISH();
11632
11633 IEM_MC_END();
11634}
11635
11636
11637/** Opcode 0xda !11/0. */
11638FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
11639{
11640 IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
11641 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
11642}
11643
11644
11645/** Opcode 0xda !11/1. */
11646FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
11647{
11648 IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
11649 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
11650}
11651
11652
11653/** Opcode 0xda !11/2. */
11654FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
11655{
11656 IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");
11657
11658 IEM_MC_BEGIN(0, 0);
11659 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11660 IEM_MC_LOCAL(uint16_t, u16Fsw);
11661 IEM_MC_LOCAL(int32_t, i32Val2);
11662 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11663 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11664 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
11665
11666 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11668
11669 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11670 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11671 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11672
11673 IEM_MC_PREPARE_FPU_USAGE();
11674 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11675 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
11676 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11677 } IEM_MC_ELSE() {
11678 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11679 } IEM_MC_ENDIF();
11680 IEM_MC_ADVANCE_RIP_AND_FINISH();
11681
11682 IEM_MC_END();
11683}
11684
11685
11686/** Opcode 0xda !11/3. */
11687FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
11688{
11689 IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");
11690
11691 IEM_MC_BEGIN(0, 0);
11692 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11693 IEM_MC_LOCAL(uint16_t, u16Fsw);
11694 IEM_MC_LOCAL(int32_t, i32Val2);
11695 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11696 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11697 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
11698
11699 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11700 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11701
11702 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11703 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11704 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11705
11706 IEM_MC_PREPARE_FPU_USAGE();
11707 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11708 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
11709 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11710 } IEM_MC_ELSE() {
11711 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11712 } IEM_MC_ENDIF();
11713 IEM_MC_ADVANCE_RIP_AND_FINISH();
11714
11715 IEM_MC_END();
11716}
11717
11718
11719/** Opcode 0xda !11/4. */
11720FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
11721{
11722 IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
11723 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
11724}
11725
11726
11727/** Opcode 0xda !11/5. */
11728FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
11729{
11730 IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
11731 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
11732}
11733
11734
11735/** Opcode 0xda !11/6. */
11736FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
11737{
11738 IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
11739 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
11740}
11741
11742
11743/** Opcode 0xda !11/7. */
11744FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
11745{
11746 IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
11747 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
11748}
11749
11750
11751/**
11752 * @opcode 0xda
11753 */
11754FNIEMOP_DEF(iemOp_EscF2)
11755{
11756 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
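    /* The 11-bit x87 FOP value is formed from the low three bits of the opcode byte and the ModR/M byte. */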
11757 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
11758 if (IEM_IS_MODRM_REG_MODE(bRm))
11759 {
11760 switch (IEM_GET_MODRM_REG_8(bRm))
11761 {
11762 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
11763 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
11764 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
11765 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
11766 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
11767 case 5:
11768 if (bRm == 0xe9)
11769 return FNIEMOP_CALL(iemOp_fucompp);
11770 IEMOP_RAISE_INVALID_OPCODE_RET();
11771 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11772 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11773 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11774 }
11775 }
11776 else
11777 {
11778 switch (IEM_GET_MODRM_REG_8(bRm))
11779 {
11780 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
11781 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
11782 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
11783 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
11784 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
11785 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
11786 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
11787 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
11788 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11789 }
11790 }
11791}
11792
11793
11794/** Opcode 0xdb !11/0. */
11795FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
11796{
11797 IEMOP_MNEMONIC(fild_m32i, "fild m32i");
11798
11799 IEM_MC_BEGIN(0, 0);
11800 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11801 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11802 IEM_MC_LOCAL(int32_t, i32Val);
11803 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11804 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);
11805
11806 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11808
11809 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11810 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11811 IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11812
11813 IEM_MC_PREPARE_FPU_USAGE();
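    /* FPU register indices are relative to the current TOP, so slot 7 is where the push will land; if it is occupied this is a stack overflow. */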
11814 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
11815 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
11816 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11817 } IEM_MC_ELSE() {
11818 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11819 } IEM_MC_ENDIF();
11820 IEM_MC_ADVANCE_RIP_AND_FINISH();
11821
11822 IEM_MC_END();
11823}
11824
11825
11826/** Opcode 0xdb !11/1. */
11827FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
11828{
11829 IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
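    /* FISTTP (SSE3) always truncates towards zero, regardless of the rounding control in FCW. */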
11830 IEM_MC_BEGIN(0, 0);
11831 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11832 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11833
11834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11835 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11836 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11837 IEM_MC_PREPARE_FPU_USAGE();
11838
11839 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11840 IEM_MC_ARG(int32_t *, pi32Dst, 1);
11841 IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11842
11843 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
11844 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11845 IEM_MC_LOCAL(uint16_t, u16Fsw);
11846 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
11847 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
11848 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
11849 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11850 } IEM_MC_ELSE() {
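        /* Source register is empty: with IM masked the integer indefinite is stored, otherwise memory is left untouched. */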
11851 IEM_MC_IF_FCW_IM() {
11852 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
11853 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
11854 } IEM_MC_ELSE() {
11855 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
11856 } IEM_MC_ENDIF();
11857 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11858 } IEM_MC_ENDIF();
11859 IEM_MC_ADVANCE_RIP_AND_FINISH();
11860
11861 IEM_MC_END();
11862}
11863
11864
11865/** Opcode 0xdb !11/2. */
11866FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
11867{
11868 IEMOP_MNEMONIC(fist_m32i, "fist m32i");
11869 IEM_MC_BEGIN(0, 0);
11870 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11871 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11872
11873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11874 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11875 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11876 IEM_MC_PREPARE_FPU_USAGE();
11877
11878 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11879 IEM_MC_ARG(int32_t *, pi32Dst, 1);
11880 IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11881
11882 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
11883 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11884 IEM_MC_LOCAL(uint16_t, u16Fsw);
11885 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
11886 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
11887 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
11888 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11889 } IEM_MC_ELSE() {
11890 IEM_MC_IF_FCW_IM() {
11891 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
11892 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
11893 } IEM_MC_ELSE() {
11894 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
11895 } IEM_MC_ENDIF();
11896 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11897 } IEM_MC_ENDIF();
11898 IEM_MC_ADVANCE_RIP_AND_FINISH();
11899
11900 IEM_MC_END();
11901}
11902
11903
11904/** Opcode 0xdb !11/3. */
11905FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
11906{
11907 IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
11908 IEM_MC_BEGIN(0, 0);
11909 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11910 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11911
11912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11913 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11914 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11915 IEM_MC_PREPARE_FPU_USAGE();
11916
11917 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11918 IEM_MC_ARG(int32_t *, pi32Dst, 1);
11919 IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11920
11921 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
11922 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11923 IEM_MC_LOCAL(uint16_t, u16Fsw);
11924 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
11925 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
11926 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
11927 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11928 } IEM_MC_ELSE() {
11929 IEM_MC_IF_FCW_IM() {
11930 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
11931 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
11932 } IEM_MC_ELSE() {
11933 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
11934 } IEM_MC_ENDIF();
11935 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11936 } IEM_MC_ENDIF();
11937 IEM_MC_ADVANCE_RIP_AND_FINISH();
11938
11939 IEM_MC_END();
11940}
11941
11942
11943/** Opcode 0xdb !11/5. */
11944FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
11945{
11946 IEMOP_MNEMONIC(fld_m80r, "fld m80r");
11947
11948 IEM_MC_BEGIN(0, 0);
11949 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11950 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11951 IEM_MC_LOCAL(RTFLOAT80U, r80Val);
11952 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11953 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);
11954
11955 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11957
11958 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11959 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11960 IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11961
11962 IEM_MC_PREPARE_FPU_USAGE();
11963 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
11964 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
11965 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11966 } IEM_MC_ELSE() {
11967 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11968 } IEM_MC_ENDIF();
11969 IEM_MC_ADVANCE_RIP_AND_FINISH();
11970
11971 IEM_MC_END();
11972}
11973
11974
11975/** Opcode 0xdb !11/7. */
11976FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
11977{
11978 IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
11979 IEM_MC_BEGIN(0, 0);
11980 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11981 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11982
11983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11984 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11985 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11986 IEM_MC_PREPARE_FPU_USAGE();
11987
11988 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11989 IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
11990 IEM_MC_MEM_MAP_R80_WO(pr80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11991
11992 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
11993 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11994 IEM_MC_LOCAL(uint16_t, u16Fsw);
11995 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
11996 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
11997 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
11998 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11999 } IEM_MC_ELSE() {
12000 IEM_MC_IF_FCW_IM() {
12001 IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
12002 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
12003 } IEM_MC_ELSE() {
12004 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
12005 } IEM_MC_ENDIF();
12006 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12007 } IEM_MC_ENDIF();
12008 IEM_MC_ADVANCE_RIP_AND_FINISH();
12009
12010 IEM_MC_END();
12011}
12012
12013
12014/** Opcode 0xdb 11/0. */
12015FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
12016{
12017 IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
12018 IEM_MC_BEGIN(0, 0);
12019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12020 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
12021
12022 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12023 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12024
12025 IEM_MC_PREPARE_FPU_USAGE();
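    /* FCMOVcc requires both ST0 and ST(i) to be valid; an empty register yields a stack underflow rather than a move. */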
12026 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
12027 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
12028 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
12029 } IEM_MC_ENDIF();
12030 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
12031 } IEM_MC_ELSE() {
12032 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
12033 } IEM_MC_ENDIF();
12034 IEM_MC_ADVANCE_RIP_AND_FINISH();
12035
12036 IEM_MC_END();
12037}
12038
12039
12040/** Opcode 0xdb 11/1. */
12041FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
12042{
12043 IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
12044 IEM_MC_BEGIN(0, 0);
12045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12046 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
12047
12048 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12049 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12050
12051 IEM_MC_PREPARE_FPU_USAGE();
12052 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
12053 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12054 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
12055 } IEM_MC_ENDIF();
12056 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
12057 } IEM_MC_ELSE() {
12058 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
12059 } IEM_MC_ENDIF();
12060 IEM_MC_ADVANCE_RIP_AND_FINISH();
12061
12062 IEM_MC_END();
12063}
12064
12065
12066/** Opcode 0xdb 11/2. */
12067FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
12068{
12069 IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
12070 IEM_MC_BEGIN(0, 0);
12071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12072 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
12073
12074 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12075 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12076
12077 IEM_MC_PREPARE_FPU_USAGE();
12078 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
12079 IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
12080 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
12081 } IEM_MC_ENDIF();
12082 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
12083 } IEM_MC_ELSE() {
12084 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
12085 } IEM_MC_ENDIF();
12086 IEM_MC_ADVANCE_RIP_AND_FINISH();
12087
12088 IEM_MC_END();
12089}
12090
12091
12092/** Opcode 0xdb 11/3. */
12093FNIEMOP_DEF_1(iemOp_fcmovnu_stN, uint8_t, bRm)
12094{
12095    IEMOP_MNEMONIC(fcmovnu_st0_stN, "fcmovnu st0,stN");
12096 IEM_MC_BEGIN(0, 0);
12097 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12098 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
12099
12100 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12101 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12102
12103 IEM_MC_PREPARE_FPU_USAGE();
12104 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
12105 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
12106 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
12107 } IEM_MC_ENDIF();
12108 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
12109 } IEM_MC_ELSE() {
12110 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
12111 } IEM_MC_ENDIF();
12112 IEM_MC_ADVANCE_RIP_AND_FINISH();
12113
12114 IEM_MC_END();
12115}
12116
12117
12118/** Opcode 0xdb 0xe0. */
12119FNIEMOP_DEF(iemOp_fneni)
12120{
12121 IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
12122 IEM_MC_BEGIN(0, 0);
12123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12124 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12125 IEM_MC_ADVANCE_RIP_AND_FINISH();
12126 IEM_MC_END();
12127}
12128
12129
12130/** Opcode 0xdb 0xe1. */
12131FNIEMOP_DEF(iemOp_fndisi)
12132{
12133 IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
12134 IEM_MC_BEGIN(0, 0);
12135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12136 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12137 IEM_MC_ADVANCE_RIP_AND_FINISH();
12138 IEM_MC_END();
12139}
12140
12141
12142/** Opcode 0xdb 0xe2. */
12143FNIEMOP_DEF(iemOp_fnclex)
12144{
12145 IEMOP_MNEMONIC(fnclex, "fnclex");
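    /* No-wait form: clears the FSW exception flags, ES and B without first checking for pending exceptions. */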
12146 IEM_MC_BEGIN(0, 0);
12147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12148 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12149 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12150 IEM_MC_CLEAR_FSW_EX();
12151 IEM_MC_ADVANCE_RIP_AND_FINISH();
12152 IEM_MC_END();
12153}
12154
12155
12156/** Opcode 0xdb 0xe3. */
12157FNIEMOP_DEF(iemOp_fninit)
12158{
12159 IEMOP_MNEMONIC(fninit, "fninit");
12160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12161 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
12162 iemCImpl_finit, false /*fCheckXcpts*/);
12163}
12164
12165
12166/** Opcode 0xdb 0xe4. */
12167FNIEMOP_DEF(iemOp_fnsetpm)
12168{
12169 IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
12170 IEM_MC_BEGIN(0, 0);
12171 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12172 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12173 IEM_MC_ADVANCE_RIP_AND_FINISH();
12174 IEM_MC_END();
12175}
12176
12177
12178/** Opcode 0xdb 0xe5. */
12179FNIEMOP_DEF(iemOp_frstpm)
12180{
12181 IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
12182#if 0 /* #UDs on newer CPUs */
12183 IEM_MC_BEGIN(0, 0);
12184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12185 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12186 IEM_MC_ADVANCE_RIP_AND_FINISH();
12187 IEM_MC_END();
12188 return VINF_SUCCESS;
12189#else
12190 IEMOP_RAISE_INVALID_OPCODE_RET();
12191#endif
12192}
12193
12194
12195/** Opcode 0xdb 11/5. */
12196FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
12197{
12198 IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
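    /* FUCOMI sets ZF/PF/CF directly in EFLAGS rather than the x87 condition codes, hence the status-flags annotation on the deferral. */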
12199 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
12200 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
12201 0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
12202}
12203
12204
12205/** Opcode 0xdb 11/6. */
12206FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
12207{
12208 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
12209 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
12210 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
12211                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
12212}
12213
12214
12215/**
12216 * @opcode 0xdb
12217 */
12218FNIEMOP_DEF(iemOp_EscF3)
12219{
12220 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12221 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
12222 if (IEM_IS_MODRM_REG_MODE(bRm))
12223 {
12224 switch (IEM_GET_MODRM_REG_8(bRm))
12225 {
12226 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
12227 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
12228 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
12229            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnu_stN, bRm);
12230 case 4:
12231 switch (bRm)
12232 {
12233 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
12234 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
12235 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
12236 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
12237 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
12238 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
12239 case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
12240 case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
12241 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12242 }
12243 break;
12244 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
12245 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
12246 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
12247 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12248 }
12249 }
12250 else
12251 {
12252 switch (IEM_GET_MODRM_REG_8(bRm))
12253 {
12254 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
12255            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i, bRm);
12256 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
12257 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
12258 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
12259 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
12260 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
12261 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
12262 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12263 }
12264 }
12265}
12266
12267
12268/**
12269 * Common worker for FPU instructions working on STn and ST0, and storing the
12270 * result in STn unless IE, DE or ZE was raised.
12271 *
12272 * @param bRm Mod R/M byte.
12273 * @param pfnAImpl Pointer to the instruction implementation (assembly).
12274 */
12275FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
12276{
12277 IEM_MC_BEGIN(0, 0);
12278 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12279 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
12280 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
12281 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12282 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
12283
12284 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12285 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12286
12287 IEM_MC_PREPARE_FPU_USAGE();
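    /* Note! ST(i) is fetched as the first operand and is also the destination; ST0 is the second operand. */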
12288 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
12289 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
12290 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
12291 } IEM_MC_ELSE() {
12292 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
12293 } IEM_MC_ENDIF();
12294 IEM_MC_ADVANCE_RIP_AND_FINISH();
12295
12296 IEM_MC_END();
12297}
12298
12299
12300/** Opcode 0xdc 11/0. */
12301FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
12302{
12303 IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
12304 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
12305}
12306
12307
12308/** Opcode 0xdc 11/1. */
12309FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
12310{
12311 IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
12312 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
12313}
12314
12315
12316/** Opcode 0xdc 11/4. */
12317FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
12318{
12319 IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
12320 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
12321}
12322
12323
12324/** Opcode 0xdc 11/5. */
12325FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
12326{
12327 IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
12328 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
12329}
12330
12331
12332/** Opcode 0xdc 11/6. */
12333FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
12334{
12335 IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
12336 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
12337}
12338
12339
12340/** Opcode 0xdc 11/7. */
12341FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
12342{
12343 IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
12344 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
12345}
12346
12347
12348/**
12349 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
12350 * memory operand, and storing the result in ST0.
12351 *
12352 * @param bRm Mod R/M byte.
12353 * @param pfnImpl Pointer to the instruction implementation (assembly).
12354 */
12355FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
12356{
12357 IEM_MC_BEGIN(0, 0);
12358 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12359 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
12360 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
12361 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
12362 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
12363 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
12364
12365 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12367 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12368 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12369
12370 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12371 IEM_MC_PREPARE_FPU_USAGE();
12372 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
12373 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
12374 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12375 } IEM_MC_ELSE() {
12376 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12377 } IEM_MC_ENDIF();
12378 IEM_MC_ADVANCE_RIP_AND_FINISH();
12379
12380 IEM_MC_END();
12381}
12382
12383
12384/** Opcode 0xdc !11/0. */
12385FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
12386{
12387 IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
12388 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
12389}
12390
12391
12392/** Opcode 0xdc !11/1. */
12393FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
12394{
12395 IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
12396 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
12397}
12398
12399
12400/** Opcode 0xdc !11/2. */
12401FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
12402{
12403 IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");
12404
12405 IEM_MC_BEGIN(0, 0);
12406 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12407 IEM_MC_LOCAL(uint16_t, u16Fsw);
12408 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
12409 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12410 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12411 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
12412
12413 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12415
12416 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12417 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12418 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12419
12420 IEM_MC_PREPARE_FPU_USAGE();
12421 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
12422 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
12423 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12424 } IEM_MC_ELSE() {
12425 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12426 } IEM_MC_ENDIF();
12427 IEM_MC_ADVANCE_RIP_AND_FINISH();
12428
12429 IEM_MC_END();
12430}
12431
12432
12433/** Opcode 0xdc !11/3. */
12434FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
12435{
12436 IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");
12437
12438 IEM_MC_BEGIN(0, 0);
12439 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12440 IEM_MC_LOCAL(uint16_t, u16Fsw);
12441 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
12442 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12443 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12444 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
12445
12446 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12448
12449 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12450 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12451 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12452
12453 IEM_MC_PREPARE_FPU_USAGE();
12454 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
12455 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
12456 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12457 } IEM_MC_ELSE() {
12458 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12459 } IEM_MC_ENDIF();
12460 IEM_MC_ADVANCE_RIP_AND_FINISH();
12461
12462 IEM_MC_END();
12463}
12464
12465
12466/** Opcode 0xdc !11/4. */
12467FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
12468{
12469 IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
12470 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
12471}
12472
12473
12474/** Opcode 0xdc !11/5. */
12475FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
12476{
12477 IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
12478 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
12479}
12480
12481
12482/** Opcode 0xdc !11/6. */
12483FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
12484{
12485 IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
12486 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
12487}
12488
12489
12490/** Opcode 0xdc !11/7. */
12491FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
12492{
12493 IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
12494 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
12495}
12496
12497
12498/**
12499 * @opcode 0xdc
12500 */
12501FNIEMOP_DEF(iemOp_EscF4)
12502{
12503 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12504 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
12505 if (IEM_IS_MODRM_REG_MODE(bRm))
12506 {
12507 switch (IEM_GET_MODRM_REG_8(bRm))
12508 {
12509 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
12510 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
12511            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, Intel behavior is that of FCOM ST(i). */
12512            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, Intel behavior is that of FCOMP ST(i). */
12513 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
12514 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
12515 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
12516 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
12517 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12518 }
12519 }
12520 else
12521 {
12522 switch (IEM_GET_MODRM_REG_8(bRm))
12523 {
12524 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
12525 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
12526 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
12527 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
12528 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
12529 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
12530 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
12531 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
12532 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12533 }
12534 }
12535}
12536
12537
12538/** Opcode 0xdd !11/0.
12539 * @sa iemOp_fld_m32r */
12540FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
12541{
12542 IEMOP_MNEMONIC(fld_m64r, "fld m64r");
12543
12544 IEM_MC_BEGIN(0, 0);
12545 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12546 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
12547 IEM_MC_LOCAL(RTFLOAT64U, r64Val);
12548 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
12549 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);
12550
12551 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12552 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12553 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12554 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12555
12556 IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12557 IEM_MC_PREPARE_FPU_USAGE();
12558 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
12559 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
12560 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12561 } IEM_MC_ELSE() {
12562 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12563 } IEM_MC_ENDIF();
12564 IEM_MC_ADVANCE_RIP_AND_FINISH();
12565
12566 IEM_MC_END();
12567}
12568
12569
12570/** Opcode 0xdd !11/1. */
12571FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
12572{
12573 IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
12574 IEM_MC_BEGIN(0, 0);
12575 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12576 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12577
12578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12579 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12580 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12581 IEM_MC_PREPARE_FPU_USAGE();
12582
12583 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
12584 IEM_MC_ARG(int64_t *, pi64Dst, 1);
12585 IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
12586
12587 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
12588 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
12589 IEM_MC_LOCAL(uint16_t, u16Fsw);
12590 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
12591 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
12592 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
12593 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12594 } IEM_MC_ELSE() {
12595 IEM_MC_IF_FCW_IM() {
12596 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
12597 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
12598 } IEM_MC_ELSE() {
12599 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
12600 } IEM_MC_ENDIF();
12601 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12602 } IEM_MC_ENDIF();
12603 IEM_MC_ADVANCE_RIP_AND_FINISH();
12604
12605 IEM_MC_END();
12606}
12607
12608
12609/** Opcode 0xdd !11/2. */
12610FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
12611{
12612 IEMOP_MNEMONIC(fst_m64r, "fst m64r");
12613 IEM_MC_BEGIN(0, 0);
12614 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12615 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12616
12617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12618 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12619 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12620 IEM_MC_PREPARE_FPU_USAGE();
12621
12622 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
12623 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
12624 IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
12625
12626 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
12627 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
12628 IEM_MC_LOCAL(uint16_t, u16Fsw);
12629 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
12630 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
12631 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
12632 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12633 } IEM_MC_ELSE() {
12634 IEM_MC_IF_FCW_IM() {
12635 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
12636 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
12637 } IEM_MC_ELSE() {
12638 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
12639 } IEM_MC_ENDIF();
12640 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12641 } IEM_MC_ENDIF();
12642 IEM_MC_ADVANCE_RIP_AND_FINISH();
12643
12644 IEM_MC_END();
12645}
12646
12649
12650/** Opcode 0xdd !11/3. */
12651FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
12652{
12653 IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
12654 IEM_MC_BEGIN(0, 0);
12655 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12656 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12657
12658 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12659 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12660 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12661 IEM_MC_PREPARE_FPU_USAGE();
12662
12663 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
12664 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
12665 IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
12666
12667 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
12668 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
12669 IEM_MC_LOCAL(uint16_t, u16Fsw);
12670 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
12671 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
12672 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
12673 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12674 } IEM_MC_ELSE() {
12675 IEM_MC_IF_FCW_IM() {
12676 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
12677 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
12678 } IEM_MC_ELSE() {
12679 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
12680 } IEM_MC_ENDIF();
12681 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12682 } IEM_MC_ENDIF();
12683 IEM_MC_ADVANCE_RIP_AND_FINISH();
12684
12685 IEM_MC_END();
12686}
12687
12688
12689/** Opcode 0xdd !11/4. */
12690FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
12691{
12692 IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
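    /* The image is 94 or 108 bytes: a 14-byte (16-bit opsize) or 28-byte (32/64-bit opsize) control block followed by eight 10-byte registers. */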
12693 IEM_MC_BEGIN(0, 0);
12694 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
12695 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12696
12697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12698 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12699 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12700
12701 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
12702 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
12703 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
12704 iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
12705 IEM_MC_END();
12706}
12707
12708
12709/** Opcode 0xdd !11/6. */
12710FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
12711{
12712 IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
12713 IEM_MC_BEGIN(0, 0);
12714 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
12715 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12716
12717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12718 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12719 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */
12720
12721 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
12722 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
12723 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
12724 iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
12725 IEM_MC_END();
12726}
12727
12728/** Opcode 0xdd !11/7. */
12729FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
12730{
12731 IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");
12732
12733 IEM_MC_BEGIN(0, 0);
12734 IEM_MC_LOCAL(uint16_t, u16Tmp);
12735 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12736
12737 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12739 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12740
12741 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
12742 IEM_MC_FETCH_FSW(u16Tmp);
12743 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
12744 IEM_MC_ADVANCE_RIP_AND_FINISH();
12745
12746/** @todo Debug / drop a hint to the verifier that things may differ
12747 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
12748 * NT4SP1. (X86_FSW_PE) */
12749 IEM_MC_END();
12750}
12751
12752
12753/** Opcode 0xdd 11/0. */
12754FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
12755{
12756 IEMOP_MNEMONIC(ffree_stN, "ffree stN");
12757    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
12758 unmodified. */
12759 IEM_MC_BEGIN(0, 0);
12760 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12761
12762 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12763 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12764
12765 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12766 IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
12767 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
12768
12769 IEM_MC_ADVANCE_RIP_AND_FINISH();
12770 IEM_MC_END();
12771}
12772
12773
12774/** Opcode 0xdd 11/2. */
12775FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
12776{
12777 IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
12778 IEM_MC_BEGIN(0, 0);
12779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12780 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
12781 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
12782 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12783 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12784
12785 IEM_MC_PREPARE_FPU_USAGE();
12786 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
12787 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
12788 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
12789 } IEM_MC_ELSE() {
12790 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
12791 } IEM_MC_ENDIF();
12792
12793 IEM_MC_ADVANCE_RIP_AND_FINISH();
12794 IEM_MC_END();
12795}
12796
12797
12798/** Opcode 0xdd 11/4. */
12799FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
12800{
12801 IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
12802 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
12803}
12804
12805
12806/** Opcode 0xdd 11/5. */
12807FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
12808{
12809 IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
12810 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
12811}
12812
12813
12814/**
12815 * @opcode 0xdd
12816 */
12817FNIEMOP_DEF(iemOp_EscF5)
12818{
12819 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12820 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
12821 if (IEM_IS_MODRM_REG_MODE(bRm))
12822 {
12823 switch (IEM_GET_MODRM_REG_8(bRm))
12824 {
12825 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
12826            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, Intel behavior is that of FXCH ST(i). */
12827 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
12828 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
12829            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0, bRm);
12830 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
12831 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
12832 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
12833 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12834 }
12835 }
12836 else
12837 {
12838 switch (IEM_GET_MODRM_REG_8(bRm))
12839 {
12840 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
12841 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
12842 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
12843 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
12844 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
12845 case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
12846 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
12847 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
12848 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12849 }
12850 }
12851}
12852
12853
12854/** Opcode 0xde 11/0. */
12855FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
12856{
12857 IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
12858 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
12859}
12860
12861
12862/** Opcode 0xde 11/1. */
12863FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
12864{
12865 IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
12866 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
12867}
12868
12869
12870/** Opcode 0xde 0xd9. */
12871FNIEMOP_DEF(iemOp_fcompp)
12872{
12873 IEMOP_MNEMONIC(fcompp, "fcompp");
12874 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
12875}
12876
12877
12878/** Opcode 0xde 11/4. */
12879FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
12880{
12881 IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
12882 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
12883}
12884
12885
12886/** Opcode 0xde 11/5. */
12887FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
12888{
12889 IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
12890 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
12891}
12892
12893
12894/** Opcode 0xde 11/6. */
12895FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
12896{
12897 IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
12898 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
12899}
12900
12901
12902/** Opcode 0xde 11/7. */
12903FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
12904{
12905 IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
12906 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
12907}
12908
12909
12910/**
12911 * Common worker for FPU instructions working on ST0 and an m16i, and storing
12912 * the result in ST0.
12913 *
12914 * @param bRm Mod R/M byte.
12915 * @param pfnAImpl Pointer to the instruction implementation (assembly).
12916 */
12917FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
12918{
12919 IEM_MC_BEGIN(0, 0);
12920 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12921 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
12922 IEM_MC_LOCAL(int16_t, i16Val2);
12923 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
12924 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12925 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
12926
12927 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12929
12930 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12931 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12932 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12933
12934 IEM_MC_PREPARE_FPU_USAGE();
12935 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
12936 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
12937 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
12938 } IEM_MC_ELSE() {
12939 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
12940 } IEM_MC_ENDIF();
12941 IEM_MC_ADVANCE_RIP_AND_FINISH();
12942
12943 IEM_MC_END();
12944}
12945
12946
12947/** Opcode 0xde !11/0. */
12948FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
12949{
12950 IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
12951 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
12952}
12953
12954
12955/** Opcode 0xde !11/1. */
12956FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
12957{
12958 IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
12959 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
12960}
12961
12962
12963/** Opcode 0xde !11/2. */
12964FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
12965{
12966 IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");
12967
12968 IEM_MC_BEGIN(0, 0);
12969 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12970 IEM_MC_LOCAL(uint16_t, u16Fsw);
12971 IEM_MC_LOCAL(int16_t, i16Val2);
12972 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12973 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12974 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
12975
12976 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12978
12979 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12980 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12981 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12982
12983 IEM_MC_PREPARE_FPU_USAGE();
12984 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
12985 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
12986 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12987 } IEM_MC_ELSE() {
12988 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12989 } IEM_MC_ENDIF();
12990 IEM_MC_ADVANCE_RIP_AND_FINISH();
12991
12992 IEM_MC_END();
12993}
12994
12995
12996/** Opcode 0xde !11/3. */
12997FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
12998{
12999 IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");
13000
13001 IEM_MC_BEGIN(0, 0);
13002 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13003 IEM_MC_LOCAL(uint16_t, u16Fsw);
13004 IEM_MC_LOCAL(int16_t, i16Val2);
13005 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13006 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13007 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
13008
13009 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13011
13012 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13013 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13014 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13015
13016 IEM_MC_PREPARE_FPU_USAGE();
13017 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
13018 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
13019 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
13020 } IEM_MC_ELSE() {
13021 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
13022 } IEM_MC_ENDIF();
13023 IEM_MC_ADVANCE_RIP_AND_FINISH();
13024
13025 IEM_MC_END();
13026}
13027
13028
13029/** Opcode 0xde !11/4. */
13030FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
13031{
13032 IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
13033 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
13034}
13035
13036
13037/** Opcode 0xde !11/5. */
13038FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
13039{
13040 IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
13041 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
13042}
13043
13044
13045/** Opcode 0xde !11/6. */
13046FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
13047{
13048 IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
13049 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
13050}
13051
13052
13053/** Opcode 0xde !11/7. */
13054FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
13055{
13056 IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
13057 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
13058}
13059
13060
13061/**
13062 * @opcode 0xde
13063 */
13064FNIEMOP_DEF(iemOp_EscF6)
13065{
13066 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13067 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
13068 if (IEM_IS_MODRM_REG_MODE(bRm))
13069 {
13070 switch (IEM_GET_MODRM_REG_8(bRm))
13071 {
13072 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
13073 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
13074 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
13075 case 3: if (bRm == 0xd9)
13076 return FNIEMOP_CALL(iemOp_fcompp);
13077 IEMOP_RAISE_INVALID_OPCODE_RET();
13078 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
13079 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
13080 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
13081 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
13082 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13083 }
13084 }
13085 else
13086 {
13087 switch (IEM_GET_MODRM_REG_8(bRm))
13088 {
13089 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
13090 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
13091 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
13092 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
13093 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
13094 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
13095 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
13096 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
13097 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13098 }
13099 }
13100}
13101
13102
13103/** Opcode 0xdf 11/0.
13104 * Undocumented instruction, assumed to work like ffree + fincstp. */
13105FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
13106{
13107 IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
13108 IEM_MC_BEGIN(0, 0);
13109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13110
13111 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13112 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13113
13114 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13115 IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
13116 IEM_MC_FPU_STACK_INC_TOP();
13117 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
13118
13119 IEM_MC_ADVANCE_RIP_AND_FINISH();
13120 IEM_MC_END();
13121}
13122
13123
13124/** Opcode 0xdf 0xe0. */
13125FNIEMOP_DEF(iemOp_fnstsw_ax)
13126{
13127 IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
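    /* No-wait form: there is no implicit WAIT, so pending FPU exceptions are not checked before the store. */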
13128 IEM_MC_BEGIN(0, 0);
13129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13130 IEM_MC_LOCAL(uint16_t, u16Tmp);
13131 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13132 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
13133 IEM_MC_FETCH_FSW(u16Tmp);
13134 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
13135 IEM_MC_ADVANCE_RIP_AND_FINISH();
13136 IEM_MC_END();
13137}
13138
13139
13140/** Opcode 0xdf 11/5. */
13141FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
13142{
13143 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
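    /* The pop indicator is packed into bit 31 of the last argument, alongside the 16-bit FPU opcode. */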
13144 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
13145                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
13146 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
13147}
13148
13149
13150/** Opcode 0xdf 11/6. */
13151FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
13152{
13153 IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
13154 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
13155 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
13156 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
13157}
13158
13159
13160/** Opcode 0xdf !11/0. */
13161FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
13162{
13163 IEMOP_MNEMONIC(fild_m16i, "fild m16i");
13164
13165 IEM_MC_BEGIN(0, 0);
13166 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13167 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13168 IEM_MC_LOCAL(int16_t, i16Val);
13169 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13170 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);
13171
13172 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13174
13175 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13176 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13177 IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13178
13179 IEM_MC_PREPARE_FPU_USAGE();
13180 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
13181 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
13182 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
13183 } IEM_MC_ELSE() {
13184 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
13185 } IEM_MC_ENDIF();
13186 IEM_MC_ADVANCE_RIP_AND_FINISH();
13187
13188 IEM_MC_END();
13189}
13190
13191
13192/** Opcode 0xdf !11/1. */
13193FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
13194{
13195 IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
13196 IEM_MC_BEGIN(0, 0);
13197 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13198 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13199
13200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13201 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13202 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13203 IEM_MC_PREPARE_FPU_USAGE();
13204
13205 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
13206 IEM_MC_ARG(int16_t *, pi16Dst, 1);
13207 IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13208
13209 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
13210 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
13211 IEM_MC_LOCAL(uint16_t, u16Fsw);
13212 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
13213 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
13214 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
13215 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13216 } IEM_MC_ELSE() {
13217 IEM_MC_IF_FCW_IM() {
13218 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
13219 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
13220 } IEM_MC_ELSE() {
13221 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
13222 } IEM_MC_ENDIF();
13223 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13224 } IEM_MC_ENDIF();
13225 IEM_MC_ADVANCE_RIP_AND_FINISH();
13226
13227 IEM_MC_END();
13228}
13229
13230
13231/** Opcode 0xdf !11/2. */
13232FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
13233{
13234 IEMOP_MNEMONIC(fist_m16i, "fist m16i");
13235 IEM_MC_BEGIN(0, 0);
13236 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13237 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13238
13239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13240 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13241 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13242 IEM_MC_PREPARE_FPU_USAGE();
13243
13244 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
13245 IEM_MC_ARG(int16_t *, pi16Dst, 1);
13246 IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13247
13248 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
13249 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
13250 IEM_MC_LOCAL(uint16_t, u16Fsw);
13251 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
13252 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
13253 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
13254 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13255 } IEM_MC_ELSE() {
13256 IEM_MC_IF_FCW_IM() {
13257 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
13258 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
13259 } IEM_MC_ELSE() {
13260 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
13261 } IEM_MC_ENDIF();
13262 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13263 } IEM_MC_ENDIF();
13264 IEM_MC_ADVANCE_RIP_AND_FINISH();
13265
13266 IEM_MC_END();
13267}
13268
13269
13270/** Opcode 0xdf !11/3. */
13271FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
13272{
13273 IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
13274 IEM_MC_BEGIN(0, 0);
13275 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13276 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13277
13278 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13279 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13280 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13281 IEM_MC_PREPARE_FPU_USAGE();
13282
13283 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
13284 IEM_MC_ARG(int16_t *, pi16Dst, 1);
13285 IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13286
13287 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
13288 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
13289 IEM_MC_LOCAL(uint16_t, u16Fsw);
13290 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
13291 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
13292 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
13293 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13294 } IEM_MC_ELSE() {
13295 IEM_MC_IF_FCW_IM() {
13296 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
13297 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
13298 } IEM_MC_ELSE() {
13299 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
13300 } IEM_MC_ENDIF();
13301 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13302 } IEM_MC_ENDIF();
13303 IEM_MC_ADVANCE_RIP_AND_FINISH();
13304
13305 IEM_MC_END();
13306}
13307
13308
13309/** Opcode 0xdf !11/4. */
13310FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
13311{
13312 IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");
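    /* fbld reads an 80-bit packed BCD value (RTPBCD80U) and pushes it onto the
       FPU stack as an 80-bit float. */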
13313
13314 IEM_MC_BEGIN(0, 0);
13315 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13316 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13317 IEM_MC_LOCAL(RTPBCD80U, d80Val);
13318 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13319 IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);
13320
13321 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13323
13324 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13325 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13326 IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13327
13328 IEM_MC_PREPARE_FPU_USAGE();
13329 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
13330 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
13331 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
13332 } IEM_MC_ELSE() {
13333 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
13334 } IEM_MC_ENDIF();
13335 IEM_MC_ADVANCE_RIP_AND_FINISH();
13336
13337 IEM_MC_END();
13338}
13339
13340
13341/** Opcode 0xdf !11/5. */
13342FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
13343{
13344 IEMOP_MNEMONIC(fild_m64i, "fild m64i");
13345
13346 IEM_MC_BEGIN(0, 0);
13347 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13348 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13349 IEM_MC_LOCAL(int64_t, i64Val);
13350 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13351 IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);
13352
13353 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13355
13356 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13357 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13358 IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13359
13360 IEM_MC_PREPARE_FPU_USAGE();
13361 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
13362 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
13363 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
13364 } IEM_MC_ELSE() {
13365 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
13366 } IEM_MC_ENDIF();
13367 IEM_MC_ADVANCE_RIP_AND_FINISH();
13368
13369 IEM_MC_END();
13370}
13371
13372
13373/** Opcode 0xdf !11/6. */
13374FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
13375{
13376 IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
13377 IEM_MC_BEGIN(0, 0);
13378 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13379 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13380
13381 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13382 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13383 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13384 IEM_MC_PREPARE_FPU_USAGE();
13385
13386 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
13387 IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
13388 IEM_MC_MEM_MAP_D80_WO(pd80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13389
13390 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
13391 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
13392 IEM_MC_LOCAL(uint16_t, u16Fsw);
13393 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
13394 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
13395 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
13396 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13397 } IEM_MC_ELSE() {
13398 IEM_MC_IF_FCW_IM() {
13399 IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
13400 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
13401 } IEM_MC_ELSE() {
13402 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
13403 } IEM_MC_ENDIF();
13404 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13405 } IEM_MC_ENDIF();
13406 IEM_MC_ADVANCE_RIP_AND_FINISH();
13407
13408 IEM_MC_END();
13409}
13410
13411
13412/** Opcode 0xdf !11/7. */
13413FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
13414{
13415 IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
13416 IEM_MC_BEGIN(0, 0);
13417 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13418 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13419
13420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13421 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13422 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13423 IEM_MC_PREPARE_FPU_USAGE();
13424
13425 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
13426 IEM_MC_ARG(int64_t *, pi64Dst, 1);
13427 IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13428
13429 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
13430 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
13431 IEM_MC_LOCAL(uint16_t, u16Fsw);
13432 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
13433 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
13434 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
13435 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13436 } IEM_MC_ELSE() {
13437 IEM_MC_IF_FCW_IM() {
13438 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
13439 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
13440 } IEM_MC_ELSE() {
13441 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
13442 } IEM_MC_ENDIF();
13443 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
13444 } IEM_MC_ENDIF();
13445 IEM_MC_ADVANCE_RIP_AND_FINISH();
13446
13447 IEM_MC_END();
13448}
13449
13450
13451/**
13452 * @opcode 0xdf
13453 */
13454FNIEMOP_DEF(iemOp_EscF7)
13455{
13456 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13457 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
13458 if (IEM_IS_MODRM_REG_MODE(bRm))
13459 {
13460 switch (IEM_GET_MODRM_REG_8(bRm))
13461 {
13462 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
13463 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
13464 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
13465 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
13466 case 4: if (bRm == 0xe0)
13467 return FNIEMOP_CALL(iemOp_fnstsw_ax);
13468 IEMOP_RAISE_INVALID_OPCODE_RET();
13469 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
13470 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
13471 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
13472 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13473 }
13474 }
13475 else
13476 {
13477 switch (IEM_GET_MODRM_REG_8(bRm))
13478 {
13479 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
13480 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
13481 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
13482 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
13483 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
13484 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
13485 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
13486 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
13487 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13488 }
13489 }
13490}
13491
13492
13493/**
13494 * @opcode 0xe0
13495 * @opfltest zf
13496 */
13497FNIEMOP_DEF(iemOp_loopne_Jb)
13498{
13499 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
13500 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13501 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13502
13503 switch (pVCpu->iem.s.enmEffAddrMode)
13504 {
13505 case IEMMODE_16BIT:
13506 IEM_MC_BEGIN(IEM_MC_F_NOT_64BIT, 0);
13507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13508 IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
13509 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13510 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13511 } IEM_MC_ELSE() {
13512 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13513 IEM_MC_ADVANCE_RIP_AND_FINISH();
13514 } IEM_MC_ENDIF();
13515 IEM_MC_END();
13516 break;
13517
13518 case IEMMODE_32BIT:
13519 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
13520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13521 IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
13522 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13523 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13524 } IEM_MC_ELSE() {
13525 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13526 IEM_MC_ADVANCE_RIP_AND_FINISH();
13527 } IEM_MC_ENDIF();
13528 IEM_MC_END();
13529 break;
13530
13531 case IEMMODE_64BIT:
13532 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
13533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13534 IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
13535 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13536 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13537 } IEM_MC_ELSE() {
13538 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13539 IEM_MC_ADVANCE_RIP_AND_FINISH();
13540 } IEM_MC_ENDIF();
13541 IEM_MC_END();
13542 break;
13543
13544 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13545 }
13546}
13547
13548
13549/**
13550 * @opcode 0xe1
13551 * @opfltest zf
13552 */
13553FNIEMOP_DEF(iemOp_loope_Jb)
13554{
13555 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
13556 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13557 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13558
13559 switch (pVCpu->iem.s.enmEffAddrMode)
13560 {
13561 case IEMMODE_16BIT:
13562 IEM_MC_BEGIN(IEM_MC_F_NOT_64BIT, 0);
13563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13564 IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
13565 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13566 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13567 } IEM_MC_ELSE() {
13568 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13569 IEM_MC_ADVANCE_RIP_AND_FINISH();
13570 } IEM_MC_ENDIF();
13571 IEM_MC_END();
13572 break;
13573
13574 case IEMMODE_32BIT:
13575 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
13576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13577 IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
13578 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13579 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13580 } IEM_MC_ELSE() {
13581 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13582 IEM_MC_ADVANCE_RIP_AND_FINISH();
13583 } IEM_MC_ENDIF();
13584 IEM_MC_END();
13585 break;
13586
13587 case IEMMODE_64BIT:
13588 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
13589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13590 IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
13591 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13592 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13593 } IEM_MC_ELSE() {
13594 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13595 IEM_MC_ADVANCE_RIP_AND_FINISH();
13596 } IEM_MC_ENDIF();
13597 IEM_MC_END();
13598 break;
13599
13600 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13601 }
13602}
13603
13604
13605/**
13606 * @opcode 0xe2
13607 */
13608FNIEMOP_DEF(iemOp_loop_Jb)
13609{
13610 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
13611 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13612 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13613
13614 /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
13615 * using the 32-bit operand size override. How can that be restarted? See
13616 * weird pseudo code in intel manual. */
13617
13618 /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
13619 * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
13620 * the loop causes guest crashes, but when logging it's nice to skip a few million
13621 * lines of useless output. */
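    /* (A self-branch is detected below by comparing the displacement with the
       negated instruction length: the two byte sequence E2 FE is 'loop $-2'.) */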
13622#if defined(LOG_ENABLED)
13623 if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
13624 switch (pVCpu->iem.s.enmEffAddrMode)
13625 {
13626 case IEMMODE_16BIT:
13627 IEM_MC_BEGIN(IEM_MC_F_NOT_64BIT, 0);
13628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13629 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
13630 IEM_MC_ADVANCE_RIP_AND_FINISH();
13631 IEM_MC_END();
13632 break;
13633
13634 case IEMMODE_32BIT:
13635 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
13636 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13637 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
13638 IEM_MC_ADVANCE_RIP_AND_FINISH();
13639 IEM_MC_END();
13640 break;
13641
13642 case IEMMODE_64BIT:
13643 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
13644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13645 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
13646 IEM_MC_ADVANCE_RIP_AND_FINISH();
13647 IEM_MC_END();
13648 break;
13649
13650 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13651 }
13652#endif
13653
13654 switch (pVCpu->iem.s.enmEffAddrMode)
13655 {
13656 case IEMMODE_16BIT:
13657 IEM_MC_BEGIN(IEM_MC_F_NOT_64BIT, 0);
13658 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13659 IEM_MC_IF_CX_IS_NOT_ONE() {
13660 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13661 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13662 } IEM_MC_ELSE() {
13663 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
13664 IEM_MC_ADVANCE_RIP_AND_FINISH();
13665 } IEM_MC_ENDIF();
13666 IEM_MC_END();
13667 break;
13668
13669 case IEMMODE_32BIT:
13670 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
13671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13672 IEM_MC_IF_ECX_IS_NOT_ONE() {
13673 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13674 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13675 } IEM_MC_ELSE() {
13676 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
13677 IEM_MC_ADVANCE_RIP_AND_FINISH();
13678 } IEM_MC_ENDIF();
13679 IEM_MC_END();
13680 break;
13681
13682 case IEMMODE_64BIT:
13683 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
13684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13685 IEM_MC_IF_RCX_IS_NOT_ONE() {
13686 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13687 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13688 } IEM_MC_ELSE() {
13689 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
13690 IEM_MC_ADVANCE_RIP_AND_FINISH();
13691 } IEM_MC_ENDIF();
13692 IEM_MC_END();
13693 break;
13694
13695 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13696 }
13697}
13698
13699
13700/**
13701 * @opcode 0xe3
13702 */
13703FNIEMOP_DEF(iemOp_jecxz_Jb)
13704{
13705 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
13706 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13707 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13708
13709 switch (pVCpu->iem.s.enmEffAddrMode)
13710 {
13711 case IEMMODE_16BIT:
13712 IEM_MC_BEGIN(IEM_MC_F_NOT_64BIT, 0);
13713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13714 IEM_MC_IF_CX_IS_NZ() {
13715 IEM_MC_ADVANCE_RIP_AND_FINISH();
13716 } IEM_MC_ELSE() {
13717 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13718 } IEM_MC_ENDIF();
13719 IEM_MC_END();
13720 break;
13721
13722 case IEMMODE_32BIT:
13723 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
13724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13725 IEM_MC_IF_ECX_IS_NZ() {
13726 IEM_MC_ADVANCE_RIP_AND_FINISH();
13727 } IEM_MC_ELSE() {
13728 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13729 } IEM_MC_ENDIF();
13730 IEM_MC_END();
13731 break;
13732
13733 case IEMMODE_64BIT:
13734 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
13735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13736 IEM_MC_IF_RCX_IS_NZ() {
13737 IEM_MC_ADVANCE_RIP_AND_FINISH();
13738 } IEM_MC_ELSE() {
13739 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13740 } IEM_MC_ENDIF();
13741 IEM_MC_END();
13742 break;
13743
13744 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13745 }
13746}
13747
13748
13749/**
13750 * @opcode 0xe4
13751 * @opfltest iopl
13752 */
13753FNIEMOP_DEF(iemOp_in_AL_Ib)
13754{
13755 IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
13756 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
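    /* The last argument packs the immediate-port flag (0x80) together with the
       effective address mode for the common iemCImpl_in worker. */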
13758 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13759 iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13760}
13761
13762
13763/**
13764 * @opcode 0xe5
13765 * @opfltest iopl
13766 */
13767FNIEMOP_DEF(iemOp_in_eAX_Ib)
13768{
13769 IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
13770 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13772 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13773 iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
13774 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13775}
13776
13777
13778/**
13779 * @opcode 0xe6
13780 * @opfltest iopl
13781 */
13782FNIEMOP_DEF(iemOp_out_Ib_AL)
13783{
13784 IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
13785 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13787 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
13788 iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13789}
13790
13791
13792/**
13793 * @opcode 0xe7
13794 * @opfltest iopl
13795 */
13796FNIEMOP_DEF(iemOp_out_Ib_eAX)
13797{
13798 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
13799 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13801 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
13802 iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
13803 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13804}
13805
13806
13807/**
13808 * @opcode 0xe8
13809 */
13810FNIEMOP_DEF(iemOp_call_Jv)
13811{
13812 IEMOP_MNEMONIC(call_Jv, "call Jv");
13813 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13814 switch (pVCpu->iem.s.enmEffOpSize)
13815 {
13816 case IEMMODE_16BIT:
13817 {
13818 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13819 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
13820 iemCImpl_call_rel_16, (int16_t)u16Imm);
13821 }
13822
13823 case IEMMODE_32BIT:
13824 {
13825 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13826 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
13827 iemCImpl_call_rel_32, (int32_t)u32Imm);
13828 }
13829
13830 case IEMMODE_64BIT:
13831 {
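            /* There is no rel64 encoding; the 32-bit displacement is sign-extended. */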
13832 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
13833 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
13834 iemCImpl_call_rel_64, u64Imm);
13835 }
13836
13837 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13838 }
13839}
13840
13841
13842/**
13843 * @opcode 0xe9
13844 */
13845FNIEMOP_DEF(iemOp_jmp_Jv)
13846{
13847 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
13848 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13849 switch (pVCpu->iem.s.enmEffOpSize)
13850 {
13851 case IEMMODE_16BIT:
13852 IEM_MC_BEGIN(0, 0);
13853 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
13854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13855 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
13856 IEM_MC_END();
13857 break;
13858
13859 case IEMMODE_64BIT:
13860 case IEMMODE_32BIT:
13861 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
13862 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
13863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13864 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
13865 IEM_MC_END();
13866 break;
13867
13868 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13869 }
13870}
13871
13872
13873/**
13874 * @opcode 0xea
13875 */
13876FNIEMOP_DEF(iemOp_jmp_Ap)
13877{
13878 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
13879 IEMOP_HLP_NO_64BIT();
13880
13881 /* Decode the far pointer address and pass it on to the far jump C implementation. */
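    /* The offset comes first and the selector last, e.g. the bytes EA 34 12 78 56
       decode as 'jmp 5678h:1234h' when the operand size is 16-bit. */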
13882 uint32_t off32Seg;
13883 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
13884 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
13885 else
13886 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
13887 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
13888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13889 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
13890 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
13891 iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
13892 /** @todo make task-switches, ring-switches, ++ return non-zero status */
13893}
13894
13895
13896/**
13897 * @opcode 0xeb
13898 */
13899FNIEMOP_DEF(iemOp_jmp_Jb)
13900{
13901 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
13902 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13903 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13904
13905 IEM_MC_BEGIN(0, 0);
13906 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13907 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13908 IEM_MC_END();
13909}
13910
13911
13912/**
13913 * @opcode 0xec
13914 * @opfltest iopl
13915 */
13916FNIEMOP_DEF(iemOp_in_AL_DX)
13917{
13918 IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
13919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13920 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
13921 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13922 iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
13923}
13924
13925
13926/**
13927 * @opcode 0xed
13928 * @opfltest iopl
13929 */
13930FNIEMOP_DEF(iemOp_in_eAX_DX)
13931{
13932 IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
13933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13934 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
13935 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13936 iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
13937 pVCpu->iem.s.enmEffAddrMode);
13938}
13939
13940
13941/**
13942 * @opcode 0xee
13943 * @opfltest iopl
13944 */
13945FNIEMOP_DEF(iemOp_out_DX_AL)
13946{
13947 IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
13948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13949 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
13950 iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
13951}
13952
13953
13954/**
13955 * @opcode 0xef
13956 * @opfltest iopl
13957 */
13958FNIEMOP_DEF(iemOp_out_DX_eAX)
13959{
13960 IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
13961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13962 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
13963 iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
13964 pVCpu->iem.s.enmEffAddrMode);
13965}
13966
13967
13968/**
13969 * @opcode 0xf0
13970 */
13971FNIEMOP_DEF(iemOp_lock)
13972{
13973 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
13974 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
13975
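    /* Prefix bytes simply recurse into the opcode map with the updated prefix
       state; whether LOCK is actually legal is checked by the instruction body. */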
13976 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
13977 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
13978}
13979
13980
13981/**
13982 * @opcode 0xf1
13983 */
13984FNIEMOP_DEF(iemOp_int1)
13985{
13986 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
13987 /** @todo Does not generate \#UD on 286, or so they say... Was allegedly a
13988 * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
13989 * LOADALL memo. Needs some testing. */
13990 IEMOP_HLP_MIN_386();
13991 /** @todo testcase! */
13992 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
13993 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
13994 iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
13995}
13996
13997
13998/**
13999 * @opcode 0xf2
14000 */
14001FNIEMOP_DEF(iemOp_repne)
14002{
14003 /* This overrides any previous REPE prefix. */
14004 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
14005 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
14006 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
14007
14008 /* For the 4 entry opcode tables, REPNZ overrides any previous
14009 REPZ and operand size prefixes. */
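    /* (Index into the 4-entry tables: 0 = no prefix, 1 = 0x66, 2 = 0xf3, 3 = 0xf2.) */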
14010 pVCpu->iem.s.idxPrefix = 3;
14011
14012 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
14013 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
14014}
14015
14016
14017/**
14018 * @opcode 0xf3
14019 */
14020FNIEMOP_DEF(iemOp_repe)
14021{
14022 /* This overrides any previous REPNE prefix. */
14023 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
14024 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
14025 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
14026
14027 /* For the 4 entry opcode tables, REPZ overrides any previous
14028       REPNZ and operand size prefixes. */
14029 pVCpu->iem.s.idxPrefix = 2;
14030
14031 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
14032 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
14033}
14034
14035
14036/**
14037 * @opcode 0xf4
14038 */
14039FNIEMOP_DEF(iemOp_hlt)
14040{
14041 IEMOP_MNEMONIC(hlt, "hlt");
14042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14043 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_hlt);
14044}
14045
14046
14047/**
14048 * @opcode 0xf5
14049 * @opflmodify cf
14050 */
14051FNIEMOP_DEF(iemOp_cmc)
14052{
14053 IEMOP_MNEMONIC(cmc, "cmc");
14054 IEM_MC_BEGIN(0, 0);
14055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14056 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
14057 IEM_MC_ADVANCE_RIP_AND_FINISH();
14058 IEM_MC_END();
14059}
14060
14061
14062/**
14063 * Body for 'inc/dec/not/neg Eb'.
14064 */
14065#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
14066 if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
14067 { \
14068 /* register access */ \
14069 IEM_MC_BEGIN(0, 0); \
14070 IEMOP_HLP_DONE_DECODING(); \
14071 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
14072 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
14073 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
14074 IEM_MC_REF_EFLAGS(pEFlags); \
14075 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
14076 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14077 IEM_MC_END(); \
14078 } \
14079 else \
14080 { \
14081 /* memory access. */ \
14082 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
14083 { \
14084 IEM_MC_BEGIN(0, 0); \
14085 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
14086 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14087 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14088 \
14089 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
14090 IEMOP_HLP_DONE_DECODING(); \
14091 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14092 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14093 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
14094 \
14095 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
14096 IEM_MC_COMMIT_EFLAGS(EFlags); \
14097 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14098 IEM_MC_END(); \
14099 } \
14100 else \
14101 { \
14102 IEM_MC_BEGIN(0, 0); \
14103 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
14104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14105 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14106 \
14107 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
14108 IEMOP_HLP_DONE_DECODING(); \
14109 IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14110 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14111 IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
14112 \
14113 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
14114 IEM_MC_COMMIT_EFLAGS(EFlags); \
14115 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14116 IEM_MC_END(); \
14117 } \
14118 } \
14119 (void)0
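/* Typical use, pairing the plain and locked u8 workers (see iemOp_grp3_not_Eb):
       IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked); */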
14120
14121
14122/**
14123 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
14124 */
14125#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
14126 if (IEM_IS_MODRM_REG_MODE(bRm)) \
14127 { \
14128 /* \
14129 * Register target \
14130 */ \
14131 switch (pVCpu->iem.s.enmEffOpSize) \
14132 { \
14133 case IEMMODE_16BIT: \
14134 IEM_MC_BEGIN(0, 0); \
14135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14136 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
14137 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
14138 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14139 IEM_MC_REF_EFLAGS(pEFlags); \
14140 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
14141 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14142 IEM_MC_END(); \
14143 break; \
14144 \
14145 case IEMMODE_32BIT: \
14146 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
14147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14148 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
14149 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
14150 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14151 IEM_MC_REF_EFLAGS(pEFlags); \
14152 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
14153 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
14154 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14155 IEM_MC_END(); \
14156 break; \
14157 \
14158 case IEMMODE_64BIT: \
14159 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
14160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14161 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
14162 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
14163 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14164 IEM_MC_REF_EFLAGS(pEFlags); \
14165 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
14166 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14167 IEM_MC_END(); \
14168 break; \
14169 \
14170 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
14171 } \
14172 } \
14173 else \
14174 { \
14175 /* \
14176 * Memory target. \
14177 */ \
14178 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
14179 { \
14180 switch (pVCpu->iem.s.enmEffOpSize) \
14181 { \
14182 case IEMMODE_16BIT: \
14183 IEM_MC_BEGIN(0, 0); \
14184 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
14185 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14186 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14187 \
14188 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14189 IEMOP_HLP_DONE_DECODING(); \
14190 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14191 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14192 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
14193 \
14194 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
14195 IEM_MC_COMMIT_EFLAGS(EFlags); \
14196 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14197 IEM_MC_END(); \
14198 break; \
14199 \
14200 case IEMMODE_32BIT: \
14201 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
14202 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
14203 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14204 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14205 \
14206 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14207 IEMOP_HLP_DONE_DECODING(); \
14208 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14209 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14210 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
14211 \
14212 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
14213 IEM_MC_COMMIT_EFLAGS(EFlags); \
14214 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14215 IEM_MC_END(); \
14216 break; \
14217 \
14218 case IEMMODE_64BIT: \
14219 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
14220 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
14221 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14222 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14223 \
14224 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14225 IEMOP_HLP_DONE_DECODING(); \
14226 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14227 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14228 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
14229 \
14230 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
14231 IEM_MC_COMMIT_EFLAGS(EFlags); \
14232 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14233 IEM_MC_END(); \
14234 break; \
14235 \
14236 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
14237 } \
14238 } \
14239 else \
14240 { \
14241 (void)0
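/* Note: IEMOP_BODY_UNARY_Ev deliberately ends inside the locked-memory 'else'
   branch; IEMOP_BODY_UNARY_Ev_LOCKED supplies that body and the closing braces,
   so the two macros must always be used as a pair (see not/neg/inc/dec Ev). */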
14242
14243#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
14244 switch (pVCpu->iem.s.enmEffOpSize) \
14245 { \
14246 case IEMMODE_16BIT: \
14247 IEM_MC_BEGIN(0, 0); \
14248 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
14249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14250 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14251 \
14252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14253 IEMOP_HLP_DONE_DECODING(); \
14254 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14255 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14256 IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
14257 \
14258 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
14259 IEM_MC_COMMIT_EFLAGS(EFlags); \
14260 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14261 IEM_MC_END(); \
14262 break; \
14263 \
14264 case IEMMODE_32BIT: \
14265 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
14266 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
14267 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14268 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14269 \
14270 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14271 IEMOP_HLP_DONE_DECODING(); \
14272 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14273 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14274 IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
14275 \
14276 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
14277 IEM_MC_COMMIT_EFLAGS(EFlags); \
14278 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14279 IEM_MC_END(); \
14280 break; \
14281 \
14282 case IEMMODE_64BIT: \
14283 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
14284 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
14285 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14286 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14287 \
14288 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14289 IEMOP_HLP_DONE_DECODING(); \
14290 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14291 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14292 IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
14293 \
14294 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
14295 IEM_MC_COMMIT_EFLAGS(EFlags); \
14296 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14297 IEM_MC_END(); \
14298 break; \
14299 \
14300 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
14301 } \
14302 } \
14303 } \
14304 (void)0
14305
14306
14307/**
14308 * @opmaps grp3_f6
14309 * @opcode /0
14310 * @opflclass logical
14311 * @todo also /1
14312 */
14313FNIEMOP_DEF_1(iemOp_grp3_test_Eb_Ib, uint8_t, bRm)
14314{
14315 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
14316 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
14317 IEMOP_BODY_BINARY_Eb_Ib_RO(test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
14318}
14319
14320
14321/* Body for opcode 0xf6 variations /4, /5, /6 and /7. */
14322#define IEMOP_GRP3_MUL_DIV_EB(bRm, a_pfnU8Expr) \
14323 PFNIEMAIMPLMULDIVU8 const pfnU8 = (a_pfnU8Expr); \
14324 if (IEM_IS_MODRM_REG_MODE(bRm)) \
14325 { \
14326 /* register access */ \
14327 IEM_MC_BEGIN(0, 0); \
14328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14329 IEM_MC_ARG(uint16_t *, pu16AX, 0); \
14330 IEM_MC_ARG(uint8_t, u8Value, 1); \
14331 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
14332 IEM_MC_LOCAL(int32_t, rc); \
14333 \
14334 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14335 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
14336 IEM_MC_REF_EFLAGS(pEFlags); \
14337 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags); \
14338 IEM_MC_IF_LOCAL_IS_Z(rc) { \
14339 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14340 } IEM_MC_ELSE() { \
14341 IEM_MC_RAISE_DIVIDE_ERROR(); \
14342 } IEM_MC_ENDIF(); \
14343 \
14344 IEM_MC_END(); \
14345 } \
14346 else \
14347 { \
14348 /* memory access. */ \
14349 IEM_MC_BEGIN(0, 0); \
14350 IEM_MC_ARG(uint16_t *, pu16AX, 0); \
14351 IEM_MC_ARG(uint8_t, u8Value, 1); \
14352 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
14353 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14354 IEM_MC_LOCAL(int32_t, rc); \
14355 \
14356 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14357 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14358 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14359 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
14360 IEM_MC_REF_EFLAGS(pEFlags); \
14361 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags); \
14362 IEM_MC_IF_LOCAL_IS_Z(rc) { \
14363 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14364 } IEM_MC_ELSE() { \
14365 IEM_MC_RAISE_DIVIDE_ERROR(); \
14366 } IEM_MC_ENDIF(); \
14367 \
14368 IEM_MC_END(); \
14369 } (void)0
14370
14371
14372 /* Body for opcode 0xf7 variants /4, /5, /6 and /7. */
14373#define IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, a_pImplExpr) \
14374 PCIEMOPMULDIVSIZES const pImpl = (a_pImplExpr); \
14375 if (IEM_IS_MODRM_REG_MODE(bRm)) \
14376 { \
14377 /* register access */ \
14378 switch (pVCpu->iem.s.enmEffOpSize) \
14379 { \
14380 case IEMMODE_16BIT: \
14381 IEM_MC_BEGIN(0, 0); \
14382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14383 IEM_MC_ARG(uint16_t *, pu16AX, 0); \
14384 IEM_MC_ARG(uint16_t *, pu16DX, 1); \
14385 IEM_MC_ARG(uint16_t, u16Value, 2); \
14386 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
14387 IEM_MC_LOCAL(int32_t, rc); \
14388 \
14389 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14390 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
14391 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX); \
14392 IEM_MC_REF_EFLAGS(pEFlags); \
14393 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags); \
14394 IEM_MC_IF_LOCAL_IS_Z(rc) { \
14395 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14396 } IEM_MC_ELSE() { \
14397 IEM_MC_RAISE_DIVIDE_ERROR(); \
14398 } IEM_MC_ENDIF(); \
14399 \
14400 IEM_MC_END(); \
14401 break; \
14402 \
14403 case IEMMODE_32BIT: \
14404 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
14405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14406 IEM_MC_ARG(uint32_t *, pu32AX, 0); \
14407 IEM_MC_ARG(uint32_t *, pu32DX, 1); \
14408 IEM_MC_ARG(uint32_t, u32Value, 2); \
14409 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
14410 IEM_MC_LOCAL(int32_t, rc); \
14411 \
14412 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14413 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX); \
14414 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX); \
14415 IEM_MC_REF_EFLAGS(pEFlags); \
14416 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags); \
14417 IEM_MC_IF_LOCAL_IS_Z(rc) { \
14418 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
14419 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX); \
14420 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14421 } IEM_MC_ELSE() { \
14422 IEM_MC_RAISE_DIVIDE_ERROR(); \
14423 } IEM_MC_ENDIF(); \
14424 \
14425 IEM_MC_END(); \
14426 break; \
14427 \
14428 case IEMMODE_64BIT: \
14429 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
14430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14431 IEM_MC_ARG(uint64_t *, pu64AX, 0); \
14432 IEM_MC_ARG(uint64_t *, pu64DX, 1); \
14433 IEM_MC_ARG(uint64_t, u64Value, 2); \
14434 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
14435 IEM_MC_LOCAL(int32_t, rc); \
14436 \
14437 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14438 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX); \
14439 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX); \
14440 IEM_MC_REF_EFLAGS(pEFlags); \
14441 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags); \
14442 IEM_MC_IF_LOCAL_IS_Z(rc) { \
14443 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14444 } IEM_MC_ELSE() { \
14445 IEM_MC_RAISE_DIVIDE_ERROR(); \
14446 } IEM_MC_ENDIF(); \
14447 \
14448 IEM_MC_END(); \
14449 break; \
14450 \
14451 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
14452 } \
14453 } \
14454 else \
14455 { \
14456 /* memory access. */ \
14457 switch (pVCpu->iem.s.enmEffOpSize) \
14458 { \
14459 case IEMMODE_16BIT: \
14460 IEM_MC_BEGIN(0, 0); \
14461 IEM_MC_ARG(uint16_t *, pu16AX, 0); \
14462 IEM_MC_ARG(uint16_t *, pu16DX, 1); \
14463 IEM_MC_ARG(uint16_t, u16Value, 2); \
14464 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
14465 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14466 IEM_MC_LOCAL(int32_t, rc); \
14467 \
14468 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14469 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14470 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14471 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
14472 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX); \
14473 IEM_MC_REF_EFLAGS(pEFlags); \
14474 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags); \
14475 IEM_MC_IF_LOCAL_IS_Z(rc) { \
14476 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14477 } IEM_MC_ELSE() { \
14478 IEM_MC_RAISE_DIVIDE_ERROR(); \
14479 } IEM_MC_ENDIF(); \
14480 \
14481 IEM_MC_END(); \
14482 break; \
14483 \
14484 case IEMMODE_32BIT: \
14485 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
14486 IEM_MC_ARG(uint32_t *, pu32AX, 0); \
14487 IEM_MC_ARG(uint32_t *, pu32DX, 1); \
14488 IEM_MC_ARG(uint32_t, u32Value, 2); \
14489 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
14490 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14491 IEM_MC_LOCAL(int32_t, rc); \
14492 \
14493 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14495 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14496 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX); \
14497 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX); \
14498 IEM_MC_REF_EFLAGS(pEFlags); \
14499 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags); \
14500 IEM_MC_IF_LOCAL_IS_Z(rc) { \
14501 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
14502 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX); \
14503 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14504 } IEM_MC_ELSE() { \
14505 IEM_MC_RAISE_DIVIDE_ERROR(); \
14506 } IEM_MC_ENDIF(); \
14507 \
14508 IEM_MC_END(); \
14509 break; \
14510 \
14511 case IEMMODE_64BIT: \
14512 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
14513 IEM_MC_ARG(uint64_t *, pu64AX, 0); \
14514 IEM_MC_ARG(uint64_t *, pu64DX, 1); \
14515 IEM_MC_ARG(uint64_t, u64Value, 2); \
14516 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
14517 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14518 IEM_MC_LOCAL(int32_t, rc); \
14519 \
14520 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14522 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14523 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX); \
14524 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX); \
14525 IEM_MC_REF_EFLAGS(pEFlags); \
14526 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags); \
14527 IEM_MC_IF_LOCAL_IS_Z(rc) { \
14528 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14529 } IEM_MC_ELSE() { \
14530 IEM_MC_RAISE_DIVIDE_ERROR(); \
14531 } IEM_MC_ENDIF(); \
14532 \
14533 IEM_MC_END(); \
14534 break; \
14535 \
14536 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
14537 } \
14538 } (void)0
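/* Note: in the 32-bit cases above the high halves of RAX and RDX are only
   cleared on the success path; a divide error leaves the registers untouched. */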
14539
14540
14541/**
14542 * @opmaps grp3_f6
14543 * @opcode /2
14544 * @opflclass unchanged
14545 */
14546FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
14547{
14548/** @todo does not modify EFLAGS. */
14549 IEMOP_MNEMONIC(not_Eb, "not Eb");
14550 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
14551}
14552
14553
14554/**
14555 * @opmaps grp3_f6
14556 * @opcode /3
14557 * @opflclass arithmetic
14558 */
14559FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
14560{
14561 IEMOP_MNEMONIC(neg_Eb, "neg Eb");
14562 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
14563}
14564
14565
14566/**
14567 * @opcode 0xf6
14568 */
14569FNIEMOP_DEF(iemOp_Grp3_Eb)
14570{
14571 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14572 switch (IEM_GET_MODRM_REG_8(bRm))
14573 {
14574 case 0:
14575 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb_Ib, bRm);
14576 case 1:
14577 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb_Ib, bRm);
14578 case 2:
14579 return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
14580 case 3:
14581 return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
14582 case 4:
14583 {
14584 /**
14585 * @opdone
14586 * @opmaps grp3_f6
14587 * @opcode /4
14588 * @opflclass multiply
14589 */
14590 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
14591 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14592 IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
14593 break;
14594 }
14595 case 5:
14596 {
14597 /**
14598 * @opdone
14599 * @opmaps grp3_f6
14600 * @opcode /5
14601 * @opflclass multiply
14602 */
14603 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
14604 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14605 IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
14606 break;
14607 }
14608 case 6:
14609 {
14610 /**
14611 * @opdone
14612 * @opmaps grp3_f6
14613 * @opcode /6
14614 * @opflclass division
14615 */
14616 IEMOP_MNEMONIC(div_Eb, "div Eb");
14617 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14618 IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
14619 break;
14620 }
14621 case 7:
14622 {
14623 /**
14624 * @opdone
14625 * @opmaps grp3_f6
14626 * @opcode /7
14627 * @opflclass division
14628 */
14629 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
14630 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14631 IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
14632 break;
14633 }
14634 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14635 }
14636}
14637
14638
14639/**
14640 * @opmaps grp3_f7
14641 * @opcode /0
14642 * @opflclass logical
14643 */
14644FNIEMOP_DEF_1(iemOp_grp3_test_Ev_Iz, uint8_t, bRm)
14645{
14646 IEMOP_MNEMONIC(test_Ev_Iz, "test Ev,Iz");
14647 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
14648 IEMOP_BODY_BINARY_Ev_Iz_RO(test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
14649}
14650
14651
14652/**
14653 * @opmaps grp3_f7
14654 * @opcode /2
14655 * @opflclass unchanged
14656 */
14657FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
14658{
14659/** @todo does not modify EFLAGS */
14660 IEMOP_MNEMONIC(not_Ev, "not Ev");
14661 IEMOP_BODY_UNARY_Ev( iemAImpl_not_u16, iemAImpl_not_u32, iemAImpl_not_u64);
14662 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
14663}
14664
14665
14666/**
14667 * @opmaps grp3_f7
14668 * @opcode /3
14669 * @opflclass arithmetic
14670 */
14671FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
14672{
14673 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
14674 IEMOP_BODY_UNARY_Ev( iemAImpl_neg_u16, iemAImpl_neg_u32, iemAImpl_neg_u64);
14675 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
14676}
14677
14678
14679/**
14680 * @opmaps grp3_f7
14681 * @opcode /4
14682 * @opflclass multiply
14683 */
14684FNIEMOP_DEF_1(iemOp_grp3_mul_Ev, uint8_t, bRm)
14685{
14686 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
14687 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14688 IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
14689}
14690
14691
14692/**
14693 * @opmaps grp3_f7
14694 * @opcode /5
14695 * @opflclass multiply
14696 */
14697FNIEMOP_DEF_1(iemOp_grp3_imul_Ev, uint8_t, bRm)
14698{
14699 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
14700 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14701 IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
14702}
14703
14704
14705/**
14706 * @opmaps grp3_f7
14707 * @opcode /6
14708 * @opflclass division
14709 */
14710FNIEMOP_DEF_1(iemOp_grp3_div_Ev, uint8_t, bRm)
14711{
14712 IEMOP_MNEMONIC(div_Ev, "div Ev");
14713 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14714 IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
14715}
14716
14717
14718/**
14719 * @opmaps grp3_f7
14720 * @opcode /7
14721 * @opflclass division
14722 */
14723FNIEMOP_DEF_1(iemOp_grp3_idiv_Ev, uint8_t, bRm)
14724{
14725 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
14726 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14727 IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
14728}
14729
14730
14731/**
14732 * @opcode 0xf7
14733 */
14734FNIEMOP_DEF(iemOp_Grp3_Ev)
14735{
14736 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14737 switch (IEM_GET_MODRM_REG_8(bRm))
14738 {
14739 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev_Iz, bRm);
14740 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev_Iz, bRm);
14741 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
14742 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
14743 case 4: return FNIEMOP_CALL_1(iemOp_grp3_mul_Ev, bRm);
14744 case 5: return FNIEMOP_CALL_1(iemOp_grp3_imul_Ev, bRm);
14745 case 6: return FNIEMOP_CALL_1(iemOp_grp3_div_Ev, bRm);
14746 case 7: return FNIEMOP_CALL_1(iemOp_grp3_idiv_Ev, bRm);
14747 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14748 }
14749}
14750
14751
14752/**
14753 * @opcode 0xf8
14754 * @opflmodify cf
14755 * @opflclear cf
14756 */
14757FNIEMOP_DEF(iemOp_clc)
14758{
14759 IEMOP_MNEMONIC(clc, "clc");
14760 IEM_MC_BEGIN(0, 0);
14761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14762 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
14763 IEM_MC_ADVANCE_RIP_AND_FINISH();
14764 IEM_MC_END();
14765}
14766
14767
14768/**
14769 * @opcode 0xf9
14770 * @opflmodify cf
14771 * @opflset cf
14772 */
14773FNIEMOP_DEF(iemOp_stc)
14774{
14775 IEMOP_MNEMONIC(stc, "stc");
14776 IEM_MC_BEGIN(0, 0);
14777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14778 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
14779 IEM_MC_ADVANCE_RIP_AND_FINISH();
14780 IEM_MC_END();
14781}
14782
14783
14784/**
14785 * @opcode 0xfa
14786 * @opfltest iopl,vm
14787 * @opflmodify if,vif
14788 */
14789FNIEMOP_DEF(iemOp_cli)
14790{
14791 IEMOP_MNEMONIC(cli, "cli");
14792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14793 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, 0, iemCImpl_cli);
14794}
14795
14796
14797/**
14798 * @opcode 0xfb
14799 * @opfltest iopl,vm
14800 * @opflmodify if,vif
14801 */
14802FNIEMOP_DEF(iemOp_sti)
14803{
14804 IEMOP_MNEMONIC(sti, "sti");
14805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14806 IEM_MC_DEFER_TO_CIMPL_0_RET( IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
14807 | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, 0, iemCImpl_sti);
14808}
14809
14810
14811/**
14812 * @opcode 0xfc
14813 * @opflmodify df
14814 * @opflclear df
14815 */
14816FNIEMOP_DEF(iemOp_cld)
14817{
14818 IEMOP_MNEMONIC(cld, "cld");
14819 IEM_MC_BEGIN(0, 0);
14820 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14821 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
14822 IEM_MC_ADVANCE_RIP_AND_FINISH();
14823 IEM_MC_END();
14824}
14825
14826
14827/**
14828 * @opcode 0xfd
14829 * @opflmodify df
14830 * @opflset df
14831 */
14832FNIEMOP_DEF(iemOp_std)
14833{
14834 IEMOP_MNEMONIC(std, "std");
14835 IEM_MC_BEGIN(0, 0);
14836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14837 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
14838 IEM_MC_ADVANCE_RIP_AND_FINISH();
14839 IEM_MC_END();
14840}
14841
14842
14843/**
14844 * @opmaps grp4
14845 * @opcode /0
14846 * @opflclass incdec
14847 */
14848FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
14849{
14850 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
14851 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
14852}
14853
14854
14855/**
14856 * @opmaps grp4
14857 * @opcode /1
14858 * @opflclass incdec
14859 */
14860FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
14861{
14862 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
14863 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
14864}
14865
14866
14867/**
14868 * @opcode 0xfe
14869 */
14870FNIEMOP_DEF(iemOp_Grp4)
14871{
14872 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14873 switch (IEM_GET_MODRM_REG_8(bRm))
14874 {
14875 case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
14876 case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
14877 default:
14878 /** @todo is the eff-addr decoded? */
14879 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
14880 IEMOP_RAISE_INVALID_OPCODE_RET();
14881 }
14882}
14883
14884/**
14885 * @opmaps grp5
14886 * @opcode /0
14887 * @opflclass incdec
14888 */
14889FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
14890{
14891 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
14892 IEMOP_BODY_UNARY_Ev( iemAImpl_inc_u16, iemAImpl_inc_u32, iemAImpl_inc_u64);
14893 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
14894}
14895
14896
14897/**
14898 * @opmaps grp5
14899 * @opcode /1
14900 * @opflclass incdec
14901 */
14902FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
14903{
14904 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
14905 IEMOP_BODY_UNARY_Ev( iemAImpl_dec_u16, iemAImpl_dec_u32, iemAImpl_dec_u64);
14906 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
14907}
14908
14909
14910/**
14911 * Opcode 0xff /2.
14912 * @param bRm The RM byte.
14913 */
14914FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
14915{
14916 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
14917 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
14918
14919 if (IEM_IS_MODRM_REG_MODE(bRm))
14920 {
14921 /* The new RIP is taken from a register. */
14922 switch (pVCpu->iem.s.enmEffOpSize)
14923 {
14924 case IEMMODE_16BIT:
14925 IEM_MC_BEGIN(0, 0);
14926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14927 IEM_MC_ARG(uint16_t, u16Target, 0);
14928 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14929 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
14930 IEM_MC_END();
14931 break;
14932
14933 case IEMMODE_32BIT:
14934 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
14935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14936 IEM_MC_ARG(uint32_t, u32Target, 0);
14937 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14938 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
14939 IEM_MC_END();
14940 break;
14941
14942 case IEMMODE_64BIT:
14943 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
14944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14945 IEM_MC_ARG(uint64_t, u64Target, 0);
14946 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14947 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
14948 IEM_MC_END();
14949 break;
14950
14951 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14952 }
14953 }
14954 else
14955 {
14956 /* The new RIP is taken from memory. */
14957 switch (pVCpu->iem.s.enmEffOpSize)
14958 {
14959 case IEMMODE_16BIT:
14960 IEM_MC_BEGIN(0, 0);
14961 IEM_MC_ARG(uint16_t, u16Target, 0);
14962 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14963 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14965 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14966 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
14967 IEM_MC_END();
14968 break;
14969
14970 case IEMMODE_32BIT:
14971 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
14972 IEM_MC_ARG(uint32_t, u32Target, 0);
14973 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14974 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14976 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14977 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
14978 IEM_MC_END();
14979 break;
14980
14981 case IEMMODE_64BIT:
14982 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
14983 IEM_MC_ARG(uint64_t, u64Target, 0);
14984 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14985 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14986 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14987 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14988 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
14989 IEM_MC_END();
14990 break;
14991
14992 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14993 }
14994 }
14995}
14996
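/**
 * Body for loading a far pointer, Ep, from memory and branching to it.
 *
 * The far pointer is laid out offset first, selector last (m16:16, m16:32 or
 * m16:64), which is why the selector word is fetched at displacement 2, 4 or 8.
 */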
14997#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl, a_fCImplExtra) \
14998 /* Register operand? Far pointers can only be loaded from memory. */ \
14999 if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
15000 { /* likely */ } \
15001 else \
15002 IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
15003 \
15004 /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
15005 /** @todo what does VIA do? */ \
15006 if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
15007 { /* likely */ } \
15008 else \
15009 pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
15010 \
15011 /* Far pointer loaded from memory. */ \
15012 switch (pVCpu->iem.s.enmEffOpSize) \
15013 { \
15014 case IEMMODE_16BIT: \
15015 IEM_MC_BEGIN(0, 0); \
15016 IEM_MC_ARG(uint16_t, u16Sel, 0); \
15017 IEM_MC_ARG(uint16_t, offSeg, 1); \
15018 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2); \
15019 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
15020 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
15021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
15022 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
15023 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
15024 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
15025 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
15026 a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
15027 IEM_MC_END(); \
15028 break; \
15029 \
15030 case IEMMODE_32BIT: \
15031 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
15032 IEM_MC_ARG(uint16_t, u16Sel, 0); \
15033 IEM_MC_ARG(uint32_t, offSeg, 1); \
15034 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2); \
15035 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
15036 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
15037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
15038 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
15039 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
15040 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
15041 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
15042 a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
15043 IEM_MC_END(); \
15044 break; \
15045 \
15046 case IEMMODE_64BIT: \
15047 Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
15048 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
15049 IEM_MC_ARG(uint16_t, u16Sel, 0); \
15050 IEM_MC_ARG(uint64_t, offSeg, 1); \
15051 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2); \
15052 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
15053 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
15054 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
15055 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
15056 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
15057 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
15058 | IEM_CIMPL_F_MODE /* no gates */, 0, \
15059 a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
15060 IEM_MC_END(); \
15061 break; \
15062 \
15063 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
15064 } do {} while (0)
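
/* The far-pointer operand fetched by the macro above uses the standard x86
 * m16:16 / m16:32 / m16:64 memory layouts; sketched as structs for
 * illustration (IEM does not use these types):
 *      struct { uint16_t off; uint16_t sel; };     16-bit operand size
 *      struct { uint32_t off; uint16_t sel; };     32-bit operand size
 *      struct { uint64_t off; uint16_t sel; };     64-bit operand size
 * Hence offSeg is fetched at displacement 0 and the u16Sel selector right
 * after it at displacement 2, 4 or 8. */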


/**
 * Opcode 0xff /3.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf, IEM_CIMPL_F_BRANCH_STACK);
}


/**
 * Opcode 0xff /4.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
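
/* Unlike calln (FF /2), jmpn only loads a new RIP and touches no stack, so
 * the bodies above use IEM_MC_SET_RIP_UXX_AND_FINISH directly instead of
 * calling a CIMPL worker. Illustrative FF /4 encodings (standard x86, not
 * taken from this file):
 *      FF E0               jmp rax             ; register form (64-bit code)
 *      FF 25 xx xx xx xx   jmp [rip+disp32]    ; 64-bit memory form */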


/**
 * Opcode 0xff /5.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp, 0);
}


/**
 * Opcode 0xff /6.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
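
/* Illustrative FF /6 encoding (standard x86, not taken from this file):
 *      FF 75 08    push qword [rbp+8]      ; 64-bit mode, default operand size
 * A 32-bit push is not encodable in 64-bit mode, which is what the
 * IEM_MC_F_NOT_64BIT flag on the IEMMODE_32BIT case above expresses; a 66h
 * prefix selects the 16-bit form instead. */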


/**
 * @opcode      0xff
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
        case 1:
            return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    AssertFailedReturn(VERR_IEM_IPE_3);
}
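
/* The /n digit selecting the group member is bits 5:3 of the ModR/M byte;
 * IEM_GET_MODRM_REG_8 can be assumed to reduce to something along the lines
 * of this sketch:
 *      ((bRm) >> X86_MODRM_REG_SHIFT) & 7
 * so e.g. bRm=0xD0 (binary 11 010 000) yields 2 and dispatches to
 * iemOp_Grp5_calln_Ev above. */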



const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */ iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX,         iemOp_in_eAX_DX,        iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */ iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */ iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
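
/* Dispatch sketch for the map above (an assumed caller pattern, for
 * illustration only): the decoder fetches the first opcode byte and uses it
 * to index the table, roughly:
 *      uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
 *      return FNIEMOP_CALL(g_apfnOneByteMap[b]);
 */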


/** @} */