VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 103772

Last change on this file since 103772 was 103772, checked in by vboxsync, 13 months ago

VMM/IEM: IEMOP_BODY_BINARY_Ev_Ib_RW/RO refactoring. No change. bugref:10376

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 587.4 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 103772 2024-03-11 15:27:51Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
/** The 256-entry dispatch table for the one byte opcode space (PFNIEMOP
 *  decoder function per opcode byte); defined elsewhere in the IEM sources. */
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
/**
 * Special case body for byte instructions like SUB and XOR that can be used
 * to zero a register.
 *
 * This can be used both for the r8_rm and rm_r8 forms since it's working on
 * the same register.
 *
 * The operand check verifies that reg and r/m select the same register in
 * register mode (mod == 3) and that any REX extension bits agree, in which
 * case the result is known to be zero: the register is stored as 0 and the
 * status flags are set to the SUB/XOR same-register outcome (ZF and PF set,
 * all other status bits cleared).
 */
#define IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(a_bRm) \
    /* Note: previously referenced the call site local 'bRm' instead of the \
       macro parameter; use a_bRm so the macro is hygienic for any caller. */ \
    if (   (a_bRm >> X86_MODRM_REG_SHIFT) == ((a_bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT)) \
        && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB) \
    { \
        IEM_MC_BEGIN(0, 1, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_REG(pVCpu, a_bRm), 0); \
        IEM_MC_LOCAL(uint32_t, fEFlags); \
        IEM_MC_FETCH_EFLAGS(fEFlags); \
        /* Zero result: clear all status flags, then set ZF and PF (0 has even parity). */ \
        IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS); \
        IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF); \
        IEM_MC_COMMIT_EFLAGS(fEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } ((void)0)
81
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a byte
 * memory/register operand as the destination (read-write access).
 *
 * @note    a_fMemRegNativeArchs is not referenced by the expansion at
 *          present; both memory paths always use the C fallback worker.
 */
#define IEMOP_BODY_BINARY_rm_r8_RW(a_bRm, a_InsNm, a_fRegRegNativeArchs, a_fMemRegNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
        IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
            /* Native recompiler path: work on a local copy and write it back. */ \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
            /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
            IEM_MC_LOCAL(uint32_t, uEFlags); \
            IEM_MC_FETCH_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, a_bRm), u8Dst); \
            IEM_MC_COMMIT_EFLAGS(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            /* C fallback: call the iemAImpl_<ins>_u8 worker on a register reference. */ \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            /* No LOCK prefix (or LOCK disregarded): plain read-modify-write mapping. */ \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG(uint8_t, u8Src, 1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* LOCK prefix: atomic mapping and the _locked worker variant. */ \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG(uint8_t, u8Src, 1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bMapInfoDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bMapInfoDst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8_locked), pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bMapInfoDst); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
166
/**
 * Body for instructions like TEST & CMP with a byte memory/registers as
 * operands (read-only: the destination operand is never written back, only
 * EFLAGS are updated).
 *
 * @note    The native path for the memory form is currently disabled via
 *          IEM_MC_NATIVE_IF(0); only the C fallback is taken there.
 */
#define IEMOP_BODY_BINARY_rm_r8_RO(a_bRm, a_fnNormalU8, a_EmitterBasename, a_fNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            /* No store back: only EFLAGS are committed. */ \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
            /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
            IEM_MC_LOCAL(uint32_t, uEFlags); \
            IEM_MC_FETCH_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_COMMIT_EFLAGS(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_NATIVE_IF(0) { \
                IEM_MC_LOCAL(uint8_t, u8Dst); \
                IEM_MC_FETCH_MEM_U8(u8Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, u8SrcEmit); \
                IEM_MC_FETCH_GREG_U8(u8SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                IEM_MC_LOCAL(uint32_t, uEFlags); \
                IEM_MC_FETCH_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u8Dst, u8SrcEmit, uEFlags, 8); \
                IEM_MC_COMMIT_EFLAGS(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
                IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_ARG(uint8_t, u8Src, 1); \
                IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* LOCK prefix on a read-only op is invalid. */ \
            /** @todo we should probably decode the address first. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
245
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination (reg field) and a register or memory operand as the source
 * (r/m field).  A LOCK prefix is invalid in both forms.
 */
#define IEMOP_BODY_BINARY_r8_rm(a_bRm, a_InsNm, a_fNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            /* Native recompiler path: work on a local copy and write it back. */ \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
            IEM_MC_LOCAL(uint32_t, uEFlags); \
            IEM_MC_FETCH_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, a_bRm), u8Dst); \
            IEM_MC_COMMIT_EFLAGS(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory.  The destination is a register, so no \
         * mapping is needed; a plain memory fetch of the source suffices. \
         */ \
        IEM_MC_BEGIN(3, 1, 0, 0); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
            IEM_MC_LOCAL(uint32_t, uEFlags); \
            IEM_MC_FETCH_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, a_bRm), u8Dst); \
            IEM_MC_COMMIT_EFLAGS(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
310
/**
 * Body for byte instruction CMP with a register as the destination (reg
 * field).  Read-only: the destination register is never stored back in the
 * native path; only EFLAGS are updated.  (The C fallback passes a register
 * reference but the CMP worker does not write through it.)
 */
#define IEMOP_BODY_BINARY_r8_rm_RO(a_bRm, a_InsNm, a_fNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            /* No store back: only EFLAGS are committed. */ \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
            IEM_MC_LOCAL(uint32_t, uEFlags); \
            IEM_MC_FETCH_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_COMMIT_EFLAGS(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory (source operand only, no mapping needed). \
         */ \
        IEM_MC_BEGIN(3, 1, 0, 0); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
            IEM_MC_LOCAL(uint32_t, uEFlags); \
            IEM_MC_FETCH_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_COMMIT_EFLAGS(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
372
373
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination (read-write access).
 *
 * @note    This macro deliberately ends with two scopes left open (the
 *          memory 'else' and the LOCK-prefix 'else'); they are closed by
 *          IEMOP_BODY_BINARY_rm_rv_LOCKED, which must follow at the call
 *          site.
 * @note    a_fMemRegNativeArchs is not referenced by the expansion at
 *          present; the memory paths always use the C fallback workers.
 */
#define IEMOP_BODY_BINARY_rm_rv_RW(a_bRm, a_InsNm, a_fRegRegNativeArchs, a_fMemRegNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                    IEM_MC_LOCAL(uint16_t, u16Dst); \
                    IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, a_bRm), u16Dst); \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                    IEM_MC_LOCAL(uint32_t, u32Dst); \
                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, a_bRm), u32Dst); \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
                    /* 32-bit writes through a reference don't clear bits 63:32; do it explicitly. */ \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                    IEM_MC_LOCAL(uint64_t, u64Dst); \
                    IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, a_bRm), u64Dst); \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* The LOCK-prefixed path continues in IEMOP_BODY_BINARY_rm_rv_LOCKED, \
               which also closes the two scopes left open here. */ \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * LOCK-prefixed continuation of IEMOP_BODY_BINARY_rm_rv_RW: emits the atomic
 * memory paths using the _locked worker variants and closes the two scopes
 * that the RW macro leaves open.  Must directly follow it at the call site.
 */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_bRm, a_InsNm) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16_locked), pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32_locked), pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64_locked), pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } /* closes the LOCK 'else' opened by IEMOP_BODY_BINARY_rm_rv_RW */ \
    } /* closes the memory 'else' opened by IEMOP_BODY_BINARY_rm_rv_RW */ \
    (void)0
617
/**
 * Body for read-only word/dword/qword instructions like TEST and CMP with
 * memory/register as the destination (only EFLAGS are updated; the
 * destination operand is never written back).
 *
 * A LOCK prefix raises \#UD via IEMOP_RAISE_INVALID_LOCK_PREFIX_RET().
 */
#define IEMOP_BODY_BINARY_rm_rv_RO(a_bRm, a_InsNm, a_fNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint16_t, u16Dst); \
                    IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint32_t, u32Dst); \
                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint64_t, u64Dst); \
                    IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        /* Native: plain fetch suffices since nothing is written back. */ \
                        IEM_MC_LOCAL(uint16_t, u16Dst); \
                        IEM_MC_FETCH_MEM_U16(u16Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint16_t, u16SrcEmit); \
                        IEM_MC_FETCH_GREG_U16(u16SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                        IEM_MC_LOCAL(uint32_t, uEFlags); \
                        IEM_MC_FETCH_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u16Dst, u16SrcEmit, uEFlags, 16); \
                        IEM_MC_COMMIT_EFLAGS(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                        IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG(uint16_t, u16Src, 1); \
                        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                        IEM_MC_FETCH_EFLAGS(EFlags); \
                        IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(EFlags); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint32_t, u32Dst); \
                        IEM_MC_FETCH_MEM_U32(u32Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint32_t, u32SrcEmit); \
                        IEM_MC_FETCH_GREG_U32(u32SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                        IEM_MC_LOCAL(uint32_t, uEFlags); \
                        IEM_MC_FETCH_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u32Dst, u32SrcEmit, uEFlags, 32); \
                        IEM_MC_COMMIT_EFLAGS(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                        IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG(uint32_t, u32Src, 1); \
                        IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                        IEM_MC_FETCH_EFLAGS(EFlags); \
                        IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(EFlags); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint64_t, u64Dst); \
                        IEM_MC_FETCH_MEM_U64(u64Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint64_t, u64SrcEmit); \
                        IEM_MC_FETCH_GREG_U64(u64SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                        IEM_MC_LOCAL(uint32_t, uEFlags); \
                        IEM_MC_FETCH_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u64Dst, u64SrcEmit, uEFlags, 64); \
                        IEM_MC_COMMIT_EFLAGS(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                        IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG(uint64_t, u64Src, 1); \
                        IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                        IEM_MC_FETCH_EFLAGS(EFlags); \
                        IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(EFlags); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* LOCK prefix on a read-only op is invalid. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
819
820
821/**
822 * Body for instructions like ADD, AND, OR, ++ with working on AL with
823 * a byte immediate.
824 */
825#define IEMOP_BODY_BINARY_AL_Ib(a_InsNm, a_fNativeArchs) \
826 IEM_MC_BEGIN(3, 3, 0, 0); \
827 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
829 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
830 IEM_MC_LOCAL(uint8_t, u8Dst); \
831 IEM_MC_FETCH_GREG_U8(u8Dst, X86_GREG_xAX); \
832 IEM_MC_LOCAL(uint32_t, uEFlags); \
833 IEM_MC_FETCH_EFLAGS(uEFlags); \
834 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u8Dst, u8Imm, uEFlags, 8, 8); \
835 IEM_MC_COMMIT_EFLAGS(uEFlags); \
836 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Dst); \
837 } IEM_MC_NATIVE_ELSE() { \
838 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1); \
839 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
840 IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
841 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
842 IEM_MC_REF_EFLAGS(pEFlags); \
843 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
844 } IEM_MC_NATIVE_ENDIF(); \
845 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
846 IEM_MC_END()
847
848/**
849 * Body for instructions like ADD, AND, OR, ++ with working on
850 * AX/EAX/RAX with a word/dword immediate.
851 */
852#define IEMOP_BODY_BINARY_rAX_Iz_RW(a_InsNm, a_fNativeArchs) \
853 switch (pVCpu->iem.s.enmEffOpSize) \
854 { \
855 case IEMMODE_16BIT: \
856 { \
857 IEM_MC_BEGIN(3, 2, 0, 0); \
858 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
859 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
860 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
861 IEM_MC_LOCAL(uint16_t, u16Dst); \
862 IEM_MC_FETCH_GREG_U16(u16Dst, X86_GREG_xAX); \
863 IEM_MC_LOCAL(uint32_t, uEFlags); \
864 IEM_MC_FETCH_EFLAGS(uEFlags); \
865 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
866 IEM_MC_COMMIT_EFLAGS(uEFlags); \
867 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Dst); \
868 } IEM_MC_NATIVE_ELSE() { \
869 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1); \
870 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
871 IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
872 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
873 IEM_MC_REF_EFLAGS(pEFlags); \
874 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
875 } IEM_MC_NATIVE_ENDIF(); \
876 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
877 IEM_MC_END(); \
878 } \
879 \
880 case IEMMODE_32BIT: \
881 { \
882 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0); \
883 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
885 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
886 IEM_MC_LOCAL(uint32_t, u32Dst); \
887 IEM_MC_FETCH_GREG_U32(u32Dst, X86_GREG_xAX); \
888 IEM_MC_LOCAL(uint32_t, uEFlags); \
889 IEM_MC_FETCH_EFLAGS(uEFlags); \
890 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
891 IEM_MC_COMMIT_EFLAGS(uEFlags); \
892 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Dst); \
893 } IEM_MC_NATIVE_ELSE() { \
894 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1); \
895 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
896 IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
897 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
898 IEM_MC_REF_EFLAGS(pEFlags); \
899 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
900 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
901 } IEM_MC_NATIVE_ENDIF(); \
902 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
903 IEM_MC_END(); \
904 } \
905 \
906 case IEMMODE_64BIT: \
907 { \
908 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0); \
909 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
911 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
912 IEM_MC_LOCAL(uint64_t, u64Dst); \
913 IEM_MC_FETCH_GREG_U64(u64Dst, X86_GREG_xAX); \
914 IEM_MC_LOCAL(uint32_t, uEFlags); \
915 IEM_MC_FETCH_EFLAGS(uEFlags); \
916 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
917 IEM_MC_COMMIT_EFLAGS(uEFlags); \
918 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Dst); \
919 } IEM_MC_NATIVE_ELSE() { \
920 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1); \
921 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
922 IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
923 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
924 IEM_MC_REF_EFLAGS(pEFlags); \
925 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
926 } IEM_MC_NATIVE_ENDIF(); \
927 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
928 IEM_MC_END(); \
929 } \
930 \
931 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
932 } \
933 (void)0
934
935/**
936 * Body for the instructions CMP and TEST working on AX/EAX/RAX with a
937 * word/dword immediate.
938 */
939#define IEMOP_BODY_BINARY_rAX_Iz_RO(a_InsNm, a_fNativeArchs) \
940 switch (pVCpu->iem.s.enmEffOpSize) \
941 { \
942 case IEMMODE_16BIT: \
943 { \
944 IEM_MC_BEGIN(3, 2, 0, 0); \
945 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
946 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
947 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
948 IEM_MC_LOCAL(uint16_t, u16Dst); \
949 IEM_MC_FETCH_GREG_U16(u16Dst, X86_GREG_xAX); \
950 IEM_MC_LOCAL(uint32_t, uEFlags); \
951 IEM_MC_FETCH_EFLAGS(uEFlags); \
952 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
953 IEM_MC_COMMIT_EFLAGS(uEFlags); \
954 } IEM_MC_NATIVE_ELSE() { \
955 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1); \
956 IEM_MC_ARG(uint16_t const *,pu16Dst, 0); \
957 IEM_MC_REF_GREG_U16_CONST(pu16Dst, X86_GREG_xAX); \
958 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
959 IEM_MC_REF_EFLAGS(pEFlags); \
960 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
961 } IEM_MC_NATIVE_ENDIF(); \
962 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
963 IEM_MC_END(); \
964 } \
965 \
966 case IEMMODE_32BIT: \
967 { \
968 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0); \
969 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
971 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
972 IEM_MC_LOCAL(uint32_t, u32Dst); \
973 IEM_MC_FETCH_GREG_U32(u32Dst, X86_GREG_xAX); \
974 IEM_MC_LOCAL(uint32_t, uEFlags); \
975 IEM_MC_FETCH_EFLAGS(uEFlags); \
976 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
977 IEM_MC_COMMIT_EFLAGS(uEFlags); \
978 } IEM_MC_NATIVE_ELSE() { \
979 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1); \
980 IEM_MC_ARG(uint32_t const *,pu32Dst, 0); \
981 IEM_MC_REF_GREG_U32_CONST(pu32Dst, X86_GREG_xAX); \
982 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
983 IEM_MC_REF_EFLAGS(pEFlags); \
984 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
985 } IEM_MC_NATIVE_ENDIF(); \
986 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
987 IEM_MC_END(); \
988 } \
989 \
990 case IEMMODE_64BIT: \
991 { \
992 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0); \
993 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
995 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
996 IEM_MC_LOCAL(uint64_t, u64Dst); \
997 IEM_MC_FETCH_GREG_U64(u64Dst, X86_GREG_xAX); \
998 IEM_MC_LOCAL(uint32_t, uEFlags); \
999 IEM_MC_FETCH_EFLAGS(uEFlags); \
1000 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
1001 IEM_MC_COMMIT_EFLAGS(uEFlags); \
1002 } IEM_MC_NATIVE_ELSE() { \
1003 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1); \
1004 IEM_MC_ARG(uint64_t const *,pu64Dst, 0); \
1005 IEM_MC_REF_GREG_U64_CONST(pu64Dst, X86_GREG_xAX); \
1006 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
1007 IEM_MC_REF_EFLAGS(pEFlags); \
1008 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
1009 } IEM_MC_NATIVE_ENDIF(); \
1010 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1011 IEM_MC_END(); \
1012 } \
1013 \
1014 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1015 } \
1016 (void)0
1017
1018
1019
1020/* Instruction specification format - work in progress: */
1021
1022/**
1023 * @opcode 0x00
1024 * @opmnemonic add
1025 * @op1 rm:Eb
1026 * @op2 reg:Gb
1027 * @opmaps one
1028 * @openc ModR/M
1029 * @opflclass arithmetic
1030 * @ophints harmless ignores_op_sizes
1031 * @opstats add_Eb_Gb
1032 * @opgroup og_gen_arith_bin
1033 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1034 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
1035 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
1036 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
1037 */
1038FNIEMOP_DEF(iemOp_add_Eb_Gb)
1039{
1040 IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
1041 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1042 IEMOP_BODY_BINARY_rm_r8_RW(bRm, add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1043}
1044
1045
1046/**
1047 * @opcode 0x01
1048 * @opgroup og_gen_arith_bin
1049 * @opflclass arithmetic
1050 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1051 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
1052 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
1053 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
1054 */
1055FNIEMOP_DEF(iemOp_add_Ev_Gv)
1056{
1057 IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
1058 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1059 IEMOP_BODY_BINARY_rm_rv_RW( bRm, add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1060 IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, add);
1061}
1062
1063
1064/**
1065 * @opcode 0x02
1066 * @opgroup og_gen_arith_bin
1067 * @opflclass arithmetic
1068 * @opcopytests iemOp_add_Eb_Gb
1069 */
1070FNIEMOP_DEF(iemOp_add_Gb_Eb)
1071{
1072 IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1073 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1074 IEMOP_BODY_BINARY_r8_rm(bRm, add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1075}
1076
1077
1078/**
1079 * @opcode 0x03
1080 * @opgroup og_gen_arith_bin
1081 * @opflclass arithmetic
1082 * @opcopytests iemOp_add_Ev_Gv
1083 */
1084FNIEMOP_DEF(iemOp_add_Gv_Ev)
1085{
1086 IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
1087 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1088 IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 0, add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1089}
1090
1091
1092/**
1093 * @opcode 0x04
1094 * @opgroup og_gen_arith_bin
1095 * @opflclass arithmetic
1096 * @opcopytests iemOp_add_Eb_Gb
1097 */
1098FNIEMOP_DEF(iemOp_add_Al_Ib)
1099{
1100 IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1101 IEMOP_BODY_BINARY_AL_Ib(add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1102}
1103
1104
1105/**
1106 * @opcode 0x05
1107 * @opgroup og_gen_arith_bin
1108 * @opflclass arithmetic
1109 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
1110 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
1111 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
1112 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
1113 */
1114FNIEMOP_DEF(iemOp_add_eAX_Iz)
1115{
1116 IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
1117 IEMOP_BODY_BINARY_rAX_Iz_RW(add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1118}
1119
1120
1121/**
1122 * @opcode 0x06
1123 * @opgroup og_stack_sreg
1124 */
1125FNIEMOP_DEF(iemOp_push_ES)
1126{
1127 IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
1128 IEMOP_HLP_NO_64BIT();
1129 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
1130}
1131
1132
1133/**
1134 * @opcode 0x07
1135 * @opgroup og_stack_sreg
1136 */
1137FNIEMOP_DEF(iemOp_pop_ES)
1138{
1139 IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
1140 IEMOP_HLP_NO_64BIT();
1141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1142 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
1143 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
1144 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
1145 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
1146 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
1147 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES),
1148 iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
1149}
1150
1151
1152/**
1153 * @opcode 0x08
1154 * @opgroup og_gen_arith_bin
1155 * @opflclass logical
1156 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
1157 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
1158 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
1159 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
1160 */
1161FNIEMOP_DEF(iemOp_or_Eb_Gb)
1162{
1163 IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
1164 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1165 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1166 IEMOP_BODY_BINARY_rm_r8_RW(bRm, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1167}
1168
1169
1170/*
1171 * @opcode 0x09
1172 * @opgroup og_gen_arith_bin
1173 * @opflclass logical
1174 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
1175 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
1176 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1177 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1178 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1179 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1180 * @note AF is documented as undefined, but both modern AMD and Intel CPUs clears it.
1181 */
1182FNIEMOP_DEF(iemOp_or_Ev_Gv)
1183{
1184 IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
1185 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1186 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1187 IEMOP_BODY_BINARY_rm_rv_RW( bRm, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1188 IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, or);
1189}
1190
1191
1192/**
1193 * @opcode 0x0a
1194 * @opgroup og_gen_arith_bin
1195 * @opflclass logical
1196 * @opcopytests iemOp_or_Eb_Gb
1197 */
1198FNIEMOP_DEF(iemOp_or_Gb_Eb)
1199{
1200 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
1201 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1202 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1203 IEMOP_BODY_BINARY_r8_rm(bRm, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1204}
1205
1206
1207/**
1208 * @opcode 0x0b
1209 * @opgroup og_gen_arith_bin
1210 * @opflclass logical
1211 * @opcopytests iemOp_or_Ev_Gv
1212 */
1213FNIEMOP_DEF(iemOp_or_Gv_Ev)
1214{
1215 IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
1216 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1217 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1218 IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 0, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1219}
1220
1221
1222/**
1223 * @opcode 0x0c
1224 * @opgroup og_gen_arith_bin
1225 * @opflclass logical
1226 * @opcopytests iemOp_or_Eb_Gb
1227 */
1228FNIEMOP_DEF(iemOp_or_Al_Ib)
1229{
1230 IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1231 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1232 IEMOP_BODY_BINARY_AL_Ib(or, 0);
1233}
1234
1235
1236/**
1237 * @opcode 0x0d
1238 * @opgroup og_gen_arith_bin
1239 * @opflclass logical
1240 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
1241 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
1242 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1243 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1244 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1245 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1246 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
1247 */
1248FNIEMOP_DEF(iemOp_or_eAX_Iz)
1249{
1250 IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
1251 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1252 IEMOP_BODY_BINARY_rAX_Iz_RW(or, 0);
1253}
1254
1255
1256/**
1257 * @opcode 0x0e
1258 * @opgroup og_stack_sreg
1259 */
1260FNIEMOP_DEF(iemOp_push_CS)
1261{
1262 IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
1263 IEMOP_HLP_NO_64BIT();
1264 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
1265}
1266
1267
1268/**
1269 * @opcode 0x0f
1270 * @opmnemonic EscTwo0f
1271 * @openc two0f
1272 * @opdisenum OP_2B_ESC
1273 * @ophints harmless
1274 * @opgroup og_escapes
1275 */
1276FNIEMOP_DEF(iemOp_2byteEscape)
1277{
1278#if 0 /// @todo def VBOX_STRICT
1279 /* Sanity check the table the first time around. */
1280 static bool s_fTested = false;
1281 if (RT_LIKELY(s_fTested)) { /* likely */ }
1282 else
1283 {
1284 s_fTested = true;
1285 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
1286 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
1287 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
1288 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
1289 }
1290#endif
1291
1292 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
1293 {
1294 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1295 IEMOP_HLP_MIN_286();
1296 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
1297 }
1298 /* @opdone */
1299
1300 /*
1301 * On the 8086 this is a POP CS instruction.
1302 * For the time being we don't specify this this.
1303 */
1304 IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
1305 IEMOP_HLP_NO_64BIT();
1306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1307 /** @todo eliminate END_TB here */
1308 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
1309 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
1310 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_CS),
1311 iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
1312}
1313
1314/**
1315 * @opcode 0x10
1316 * @opgroup og_gen_arith_bin
1317 * @opflclass arithmetic_carry
1318 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1319 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1320 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1321 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1322 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1323 */
1324FNIEMOP_DEF(iemOp_adc_Eb_Gb)
1325{
1326 IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
1327 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1328 IEMOP_BODY_BINARY_rm_r8_RW(bRm, adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1329}
1330
1331
1332/**
1333 * @opcode 0x11
1334 * @opgroup og_gen_arith_bin
1335 * @opflclass arithmetic_carry
1336 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1337 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1338 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1339 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1340 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1341 */
1342FNIEMOP_DEF(iemOp_adc_Ev_Gv)
1343{
1344 IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
1345 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1346 IEMOP_BODY_BINARY_rm_rv_RW( bRm, adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1347 IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, adc);
1348}
1349
1350
1351/**
1352 * @opcode 0x12
1353 * @opgroup og_gen_arith_bin
1354 * @opflclass arithmetic_carry
1355 * @opcopytests iemOp_adc_Eb_Gb
1356 */
1357FNIEMOP_DEF(iemOp_adc_Gb_Eb)
1358{
1359 IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1360 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1361 IEMOP_BODY_BINARY_r8_rm(bRm, adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1362}
1363
1364
1365/**
1366 * @opcode 0x13
1367 * @opgroup og_gen_arith_bin
1368 * @opflclass arithmetic_carry
1369 * @opcopytests iemOp_adc_Ev_Gv
1370 */
1371FNIEMOP_DEF(iemOp_adc_Gv_Ev)
1372{
1373 IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
1374 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1375 IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 0, adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1376}
1377
1378
1379/**
1380 * @opcode 0x14
1381 * @opgroup og_gen_arith_bin
1382 * @opflclass arithmetic_carry
1383 * @opcopytests iemOp_adc_Eb_Gb
1384 */
1385FNIEMOP_DEF(iemOp_adc_Al_Ib)
1386{
1387 IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1388 IEMOP_BODY_BINARY_AL_Ib(adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1389}
1390
1391
1392/**
1393 * @opcode 0x15
1394 * @opgroup og_gen_arith_bin
1395 * @opflclass arithmetic_carry
1396 * @opcopytests iemOp_adc_Ev_Gv
1397 */
1398FNIEMOP_DEF(iemOp_adc_eAX_Iz)
1399{
1400 IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
1401 IEMOP_BODY_BINARY_rAX_Iz_RW(adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1402}
1403
1404
1405/**
1406 * @opcode 0x16
1407 */
1408FNIEMOP_DEF(iemOp_push_SS)
1409{
1410 IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
1411 IEMOP_HLP_NO_64BIT();
1412 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
1413}
1414
1415
1416/**
1417 * @opcode 0x17
1418 */
1419FNIEMOP_DEF(iemOp_pop_SS)
1420{
1421 IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
1422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1423 IEMOP_HLP_NO_64BIT();
1424 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
1425 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
1426 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_SS)
1427 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_SS)
1428 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_SS)
1429 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_SS),
1430 iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
1431}
1432
1433
1434/**
1435 * @opcode 0x18
1436 * @opgroup og_gen_arith_bin
1437 * @opflclass arithmetic_carry
1438 */
1439FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
1440{
1441 IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
1442 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1443 IEMOP_BODY_BINARY_rm_r8_RW(bRm, sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1444}
1445
1446
1447/**
1448 * @opcode 0x19
1449 * @opgroup og_gen_arith_bin
1450 * @opflclass arithmetic_carry
1451 */
1452FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
1453{
1454 IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
1455 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1456 IEMOP_BODY_BINARY_rm_rv_RW( bRm, sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1457 IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, sbb);
1458}
1459
1460
1461/**
1462 * @opcode 0x1a
1463 * @opgroup og_gen_arith_bin
1464 * @opflclass arithmetic_carry
1465 */
1466FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
1467{
1468 IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1469 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1470 IEMOP_BODY_BINARY_r8_rm(bRm, sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1471}
1472
1473
1474/**
1475 * @opcode 0x1b
1476 * @opgroup og_gen_arith_bin
1477 * @opflclass arithmetic_carry
1478 */
1479FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
1480{
1481 IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
1482 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1483 IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 0, sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1484}
1485
1486
1487/**
1488 * @opcode 0x1c
1489 * @opgroup og_gen_arith_bin
1490 * @opflclass arithmetic_carry
1491 */
1492FNIEMOP_DEF(iemOp_sbb_Al_Ib)
1493{
1494 IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1495 IEMOP_BODY_BINARY_AL_Ib(sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1496}
1497
1498
1499/**
1500 * @opcode 0x1d
1501 * @opgroup og_gen_arith_bin
1502 * @opflclass arithmetic_carry
1503 */
1504FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
1505{
1506 IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
1507 IEMOP_BODY_BINARY_rAX_Iz_RW(sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1508}
1509
1510
1511/**
1512 * @opcode 0x1e
1513 * @opgroup og_stack_sreg
1514 */
1515FNIEMOP_DEF(iemOp_push_DS)
1516{
1517 IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
1518 IEMOP_HLP_NO_64BIT();
1519 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
1520}
1521
1522
1523/**
1524 * @opcode 0x1f
1525 * @opgroup og_stack_sreg
1526 */
1527FNIEMOP_DEF(iemOp_pop_DS)
1528{
1529 IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
1530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1531 IEMOP_HLP_NO_64BIT();
1532 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
1533 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
1534 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
1535 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
1536 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
1537 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS),
1538 iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
1539}
1540
1541
1542/**
1543 * @opcode 0x20
1544 * @opgroup og_gen_arith_bin
1545 * @opflclass logical
1546 */
1547FNIEMOP_DEF(iemOp_and_Eb_Gb)
1548{
1549 IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
1550 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1551 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1552 IEMOP_BODY_BINARY_rm_r8_RW(bRm, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1553}
1554
1555
1556/**
1557 * @opcode 0x21
1558 * @opgroup og_gen_arith_bin
1559 * @opflclass logical
1560 */
1561FNIEMOP_DEF(iemOp_and_Ev_Gv)
1562{
1563 IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
1564 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1565 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1566 IEMOP_BODY_BINARY_rm_rv_RW( bRm, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1567 IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, and);
1568}
1569
1570
1571/**
1572 * @opcode 0x22
1573 * @opgroup og_gen_arith_bin
1574 * @opflclass logical
1575 */
1576FNIEMOP_DEF(iemOp_and_Gb_Eb)
1577{
1578 IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1579 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1580 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1581 IEMOP_BODY_BINARY_r8_rm(bRm, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1582}
1583
1584
1585/**
1586 * @opcode 0x23
1587 * @opgroup og_gen_arith_bin
1588 * @opflclass logical
1589 */
1590FNIEMOP_DEF(iemOp_and_Gv_Ev)
1591{
1592 IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
1593 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1594 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1595 IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 0, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1596}
1597
1598
1599/**
1600 * @opcode 0x24
1601 * @opgroup og_gen_arith_bin
1602 * @opflclass logical
1603 */
1604FNIEMOP_DEF(iemOp_and_Al_Ib)
1605{
1606 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1607 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1608 IEMOP_BODY_BINARY_AL_Ib(and, 0);
1609}
1610
1611
1612/**
1613 * @opcode 0x25
1614 * @opgroup og_gen_arith_bin
1615 * @opflclass logical
1616 */
1617FNIEMOP_DEF(iemOp_and_eAX_Iz)
1618{
1619 IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
1620 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1621 IEMOP_BODY_BINARY_rAX_Iz_RW(and, 0);
1622}
1623
1624
1625/**
1626 * @opcode 0x26
1627 * @opmnemonic SEG
1628 * @op1 ES
1629 * @opgroup og_prefix
1630 * @openc prefix
1631 * @opdisenum OP_SEG
1632 * @ophints harmless
1633 */
1634FNIEMOP_DEF(iemOp_seg_ES)
1635{
1636 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
1637 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
1638 pVCpu->iem.s.iEffSeg = X86_SREG_ES;
1639
1640 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1641 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1642}
1643
1644
1645/**
1646 * @opcode 0x27
1647 * @opfltest af,cf
1648 * @opflmodify cf,pf,af,zf,sf,of
1649 * @opflundef of
1650 */
1651FNIEMOP_DEF(iemOp_daa)
1652{
1653 IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
1654 IEMOP_HLP_NO_64BIT();
1655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1656 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
1657 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_daa);
1658}
1659
1660
1661/**
1662 * @opcode 0x28
1663 * @opgroup og_gen_arith_bin
1664 * @opflclass arithmetic
1665 */
1666FNIEMOP_DEF(iemOp_sub_Eb_Gb)
1667{
1668 IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
1669 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1670 IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(bRm); /* Special case: sub samereg, samereg - zeros samereg and sets EFLAGS to know value */
1671 IEMOP_BODY_BINARY_rm_r8_RW(bRm, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1672}
1673
1674
1675/**
1676 * @opcode 0x29
1677 * @opgroup og_gen_arith_bin
1678 * @opflclass arithmetic
1679 */
1680FNIEMOP_DEF(iemOp_sub_Ev_Gv)
1681{
1682 IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
1683 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1684 IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(bRm); /* Special case: sub samereg, samereg - zeros samereg and sets EFLAGS to know value */
1685 IEMOP_BODY_BINARY_rm_rv_RW( bRm, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1686 IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, sub);
1687}
1688
1689
1690/**
1691 * @opcode 0x2a
1692 * @opgroup og_gen_arith_bin
1693 * @opflclass arithmetic
1694 */
1695FNIEMOP_DEF(iemOp_sub_Gb_Eb)
1696{
1697 IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1698 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1699 IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(bRm); /* Special case: sub samereg, samereg - zeros samereg and sets EFLAGS to know value */
1700 IEMOP_BODY_BINARY_r8_rm(bRm, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1701}
1702
1703
1704/**
1705 * @opcode 0x2b
1706 * @opgroup og_gen_arith_bin
1707 * @opflclass arithmetic
1708 */
1709FNIEMOP_DEF(iemOp_sub_Gv_Ev)
1710{
1711 IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
1712 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1713 IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(bRm); /* Special case: sub samereg, samereg - zeros samereg and sets EFLAGS to know value */
1714 IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 0, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1715}
1716
1717
1718/**
1719 * @opcode 0x2c
1720 * @opgroup og_gen_arith_bin
1721 * @opflclass arithmetic
1722 */
1723FNIEMOP_DEF(iemOp_sub_Al_Ib)
1724{
1725 IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1726 IEMOP_BODY_BINARY_AL_Ib(sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
1727}
1728
1729
1730/**
1731 * @opcode 0x2d
1732 * @opgroup og_gen_arith_bin
1733 * @opflclass arithmetic
1734 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* 0x2d: SUB rAX,Iz - subtract immediate (operand-size dependent) from AX/EAX/RAX. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz_RW(sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1740
1741
1742/**
1743 * @opcode 0x2e
1744 * @opmnemonic SEG
1745 * @op1 CS
1746 * @opgroup og_prefix
1747 * @openc prefix
1748 * @opdisenum OP_SEG
1749 * @ophints harmless
1750 */
1751FNIEMOP_DEF(iemOp_seg_CS)
1752{
1753 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
1754 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
1755 pVCpu->iem.s.iEffSeg = X86_SREG_CS;
1756
1757 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1758 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1759}
1760
1761
1762/**
1763 * @opcode 0x2f
1764 * @opfltest af,cf
1765 * @opflmodify cf,pf,af,zf,sf,of
1766 * @opflundef of
1767 */
FNIEMOP_DEF(iemOp_das)
{
    /* 0x2f: DAS - decimal adjust AL after subtraction; invalid in 64-bit mode. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is architecturally undefined after DAS */
    /* Deferred to C implementation; clobbers status flags and the xAX register. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_das);
}
1776
1777
1778/**
1779 * @opcode 0x30
1780 * @opgroup og_gen_arith_bin
1781 * @opflclass logical
1782 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* 0x30: XOR Eb,Gb - byte xor into r/m; LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined for logical ops */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* fetch ModR/M byte */
    IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(bRm); /* Special case: xor samereg, samereg - zeros samereg and sets EFLAGS to know value */
    IEMOP_BODY_BINARY_rm_r8_RW(bRm, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
1791
1792
1793/**
1794 * @opcode 0x31
1795 * @opgroup og_gen_arith_bin
1796 * @opflclass logical
1797 */
1798FNIEMOP_DEF(iemOp_xor_Ev_Gv)
1799{
1800 IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
1801 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1802 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1803 IEMOP_BODY_BINARY_rm_rv_RW( bRm, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1804 IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(bRm); /* Special case: xor samereg, samereg - zeros samereg and sets EFLAGS to know value */
1805 IEMOP_BODY_BINARY_rm_rv_LOCKED( bRm, xor);
1806}
1807
1808
1809/**
1810 * @opcode 0x32
1811 * @opgroup og_gen_arith_bin
1812 * @opflclass logical
1813 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* 0x32: XOR Gb,Eb - byte xor, register destination. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined for logical ops */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* fetch ModR/M byte */
    IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(bRm); /* Special case: xor samereg, samereg - zeros samereg and sets EFLAGS to know value */
    IEMOP_BODY_BINARY_r8_rm(bRm, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1822
1823
1824/**
1825 * @opcode 0x33
1826 * @opgroup og_gen_arith_bin
1827 * @opflclass logical
1828 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* 0x33: XOR Gv,Ev - word/dword/qword xor, register destination. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined for logical ops */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* fetch ModR/M byte */
    IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(bRm); /* Special case: xor samereg, samereg - zeros samereg and sets EFLAGS to know value */
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 0, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1837
1838
1839/**
1840 * @opcode 0x34
1841 * @opgroup og_gen_arith_bin
1842 * @opflclass logical
1843 */
1844FNIEMOP_DEF(iemOp_xor_Al_Ib)
1845{
1846 IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1847 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1848 IEMOP_BODY_BINARY_AL_Ib(xor, 0);
1849}
1850
1851
1852/**
1853 * @opcode 0x35
1854 * @opgroup og_gen_arith_bin
1855 * @opflclass logical
1856 */
1857FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1858{
1859 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1860 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1861 IEMOP_BODY_BINARY_rAX_Iz_RW(xor, 0);
1862}
1863
1864
1865/**
1866 * @opcode 0x36
1867 * @opmnemonic SEG
1868 * @op1 SS
1869 * @opgroup og_prefix
1870 * @openc prefix
1871 * @opdisenum OP_SEG
1872 * @ophints harmless
1873 */
1874FNIEMOP_DEF(iemOp_seg_SS)
1875{
1876 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
1877 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
1878 pVCpu->iem.s.iEffSeg = X86_SREG_SS;
1879
1880 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1881 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1882}
1883
1884
1885/**
1886 * @opcode 0x37
1887 * @opfltest af
1888 * @opflmodify cf,pf,af,zf,sf,of
1889 * @opflundef pf,zf,sf,of
1890 * @opgroup og_gen_arith_dec
1891 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1892 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1893 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1894 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1895 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1896 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1897 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1898 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1899 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1900 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1901 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1902 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1903 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1904 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1905 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1906 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1907 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1908 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1909 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1910 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1911 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1912 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1913 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1914 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1915 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1916 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1917 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1918 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1919 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1920 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1921 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1922 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* 0x37: AAA - ASCII adjust AL after addition; invalid in 64-bit mode. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is architecturally undefined after AAA */

    /* Deferred to C implementation; clobbers status flags and the xAX register. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aaa);
}
1932
1933
1934/**
1935 * @opcode 0x38
1936 * @opflclass arithmetic
1937 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* 0x38: CMP Eb,Gb - byte compare; read-only body (only EFLAGS updated, no LOCK). */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* fetch ModR/M byte */
    IEMOP_BODY_BINARY_rm_r8_RO(bRm, iemAImpl_cmp_u8, cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1944
1945
1946/**
1947 * @opcode 0x39
1948 * @opflclass arithmetic
1949 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* 0x39: CMP Ev,Gv - word/dword/qword compare; read-only body (only EFLAGS updated). */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* fetch ModR/M byte */
    IEMOP_BODY_BINARY_rm_rv_RO(bRm, cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1956
1957
1958/**
1959 * @opcode 0x3a
1960 * @opflclass arithmetic
1961 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* 0x3a: CMP Gb,Eb - byte compare, register first operand; read-only body. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* fetch ModR/M byte */
    IEMOP_BODY_BINARY_r8_rm_RO(bRm, cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1968
1969
1970/**
1971 * @opcode 0x3b
1972 * @opflclass arithmetic
1973 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* 0x3b: CMP Gv,Ev - word/dword/qword compare, register first operand; read-only body. */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* fetch ModR/M byte */
    IEMOP_BODY_BINARY_rv_rm_RO(bRm, cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1980
1981
1982/**
1983 * @opcode 0x3c
1984 * @opflclass arithmetic
1985 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* 0x3c: CMP AL,Ib - compare AL with immediate byte. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1991
1992
1993/**
1994 * @opcode 0x3d
1995 * @opflclass arithmetic
1996 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* 0x3d: CMP rAX,Iz - compare AX/EAX/RAX with immediate; read-only body. */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    IEMOP_BODY_BINARY_rAX_Iz_RO(cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
2002
2003
2004/**
2005 * @opcode 0x3e
2006 */
2007FNIEMOP_DEF(iemOp_seg_DS)
2008{
2009 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
2010 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
2011 pVCpu->iem.s.iEffSeg = X86_SREG_DS;
2012
2013 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2014 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2015}
2016
2017
2018/**
2019 * @opcode 0x3f
2020 * @opfltest af
2021 * @opflmodify cf,pf,af,zf,sf,of
2022 * @opflundef pf,zf,sf,of
2023 * @opgroup og_gen_arith_dec
2024 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
2025 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
2026 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
2027 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
2028 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
2029 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
2030 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
2031 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
2032 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
2033 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
2034 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
2035 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
2036 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
2037 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
2038 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
2039 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
2040 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
2041 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
2042 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
2043 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
2044 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
2045 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
2046 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
2047 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
2048 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
2049 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
2050 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
2051 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
2052 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
2053 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
2054 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
2055 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
2056 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
2057 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
2058 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
2059 */
2060FNIEMOP_DEF(iemOp_aas)
2061{
2062 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
2063 IEMOP_HLP_NO_64BIT();
2064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2065 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
2066
2067 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aas);
2068}
2069
2070
2071/**
2072 * Common 'inc/dec register' helper.
2073 *
2074 * Not for 64-bit code, only for what became the rex prefixes.
2075 */
2076#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
2077 switch (pVCpu->iem.s.enmEffOpSize) \
2078 { \
2079 case IEMMODE_16BIT: \
2080 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_64BIT, 0); \
2081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
2082 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
2083 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
2084 IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
2085 IEM_MC_REF_EFLAGS(pEFlags); \
2086 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
2087 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2088 IEM_MC_END(); \
2089 break; \
2090 \
2091 case IEMMODE_32BIT: \
2092 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
2093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
2094 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
2095 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
2096 IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
2097 IEM_MC_REF_EFLAGS(pEFlags); \
2098 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
2099 IEM_MC_CLEAR_HIGH_GREG_U64(a_iReg); \
2100 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2101 IEM_MC_END(); \
2102 break; \
2103 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2104 } \
2105 (void)0
2106
2107/**
2108 * @opcode 0x40
2109 * @opflclass incdec
2110 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Plain REX (0x40): no W/R/X/B bits set. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* decode the prefixed opcode */
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
2128
2129
2130/**
2131 * @opcode 0x41
2132 * @opflclass incdec
2133 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.B (0x41): extends the ModRM.rm / opcode register field. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3; /* high bit of the register index */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
2152
2153
2154/**
2155 * @opcode 0x42
2156 * @opflclass incdec
2157 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.X (0x42): extends the SIB index field. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* high bit of the index register */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
2176
2177
2178
2179/**
2180 * @opcode 0x43
2181 * @opflclass incdec
2182 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.BX (0x43): extends both the rm/base and SIB index fields. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
2202
2203
2204/**
2205 * @opcode 0x44
2206 * @opflclass incdec
2207 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.R (0x44): extends the ModRM.reg field. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3; /* high bit of the reg-field register */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
2226
2227
2228/**
2229 * @opcode 0x45
2230 * @opflclass incdec
2231 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RB (0x45): extends the ModRM.reg and rm/base fields. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
2251
2252
2253/**
2254 * @opcode 0x46
2255 * @opflclass incdec
2256 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RX (0x46): extends the ModRM.reg and SIB index fields. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
2276
2277
2278/**
2279 * @opcode 0x47
2280 * @opflclass incdec
2281 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RBX (0x47): extends reg, rm/base and SIB index fields. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
2302
2303
2304/**
2305 * @opcode 0x48
2306 * @opflclass incdec
2307 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.W (0x48): 64-bit operand size; recalc the effective operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
}
2326
2327
2328/**
2329 * @opcode 0x49
2330 * @opflclass incdec
2331 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.BW (0x49): rm/base extension + 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
}
2351
2352
2353/**
2354 * @opcode 0x4a
2355 * @opflclass incdec
2356 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.XW (0x4a): SIB index extension + 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
}
2376
2377
2378/**
2379 * @opcode 0x4b
2380 * @opflclass incdec
2381 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.BXW (0x4b): rm/base + SIB index extension + 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
}
2402
2403
2404/**
2405 * @opcode 0x4c
2406 * @opflclass incdec
2407 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RW (0x4c): ModRM.reg extension + 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
}
2427
2428
2429/**
2430 * @opcode 0x4d
2431 * @opflclass incdec
2432 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RBW (0x4d): reg + rm/base extension + 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
}
2453
2454
2455/**
2456 * @opcode 0x4e
2457 * @opflclass incdec
2458 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RXW (0x4e): reg + SIB index extension + 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
}
2479
2480
2481/**
2482 * @opcode 0x4f
2483 * @opflclass incdec
2484 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RBXW (0x4f): all four REX bits set. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
}
2506
2507
2508/**
2509 * Common 'push register' helper.
2510 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB; /* REX.B selects the high register bank (r8-r15). */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        /* In 64-bit mode push defaults to 64-bit; 66h selects 16-bit; there is no 32-bit push. */
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT: /* never taken in 64-bit mode, see above */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2555
2556
2557/**
2558 * @opcode 0x50
2559 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    /* 0x50: PUSH rAX (or r8 with REX.B, handled by the common helper). */
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
2565
2566
2567/**
2568 * @opcode 0x51
2569 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    /* 0x51: PUSH rCX (or r9 with REX.B). */
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
2575
2576
2577/**
2578 * @opcode 0x52
2579 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    /* 0x52: PUSH rDX (or r10 with REX.B). */
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
2585
2586
2587/**
2588 * @opcode 0x53
2589 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    /* 0x53: PUSH rBX (or r11 with REX.B). */
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
2595
2596
2597/**
2598 * @opcode 0x54
2599 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    /* 0x54: PUSH rSP (or r12 with REX.B). */
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) != IEMTARGETCPU_8086)
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);

    /* 8086 works differently wrt to 'push sp' compared to 80186 and later. */
    /* The 8086 pushes SP as already decremented (SP-2) rather than the original value. */
    IEM_MC_BEGIN(0, 1, IEM_MC_F_ONLY_8086, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Value);
    IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
    IEM_MC_SUB_LOCAL_U16(u16Value, 2);
    IEM_MC_PUSH_U16(u16Value);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
2616
2617
2618/**
2619 * @opcode 0x55
2620 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    /* 0x55: PUSH rBP (or r13 with REX.B). */
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
2626
2627
2628/**
2629 * @opcode 0x56
2630 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    /* 0x56: PUSH rSI (or r14 with REX.B). */
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
2636
2637
2638/**
2639 * @opcode 0x57
2640 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    /* 0x57: PUSH rDI (or r15 with REX.B). */
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
2646
2647
2648/**
2649 * Common 'pop register' helper.
2650 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB; /* REX.B selects the high register bank (r8-r15). */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        /* In 64-bit mode pop defaults to 64-bit; 66h selects 16-bit; there is no 32-bit pop. */
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U16(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT: /* never taken in 64-bit mode, see above */
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U32(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U64(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2689
2690
2691/**
2692 * @opcode 0x58
2693 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    /* 0x58: POP rAX (or r8 with REX.B, handled by the common helper). */
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
2699
2700
2701/**
2702 * @opcode 0x59
2703 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    /* 0x59: POP rCX (or r9 with REX.B). */
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
2709
2710
2711/**
2712 * @opcode 0x5a
2713 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    /* 0x5a: POP rDX (or r10 with REX.B). */
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
2719
2720
2721/**
2722 * @opcode 0x5b
2723 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    /* 0x5b: POP rBX (or r11 with REX.B). */
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
2729
2730
2731/**
2732 * @opcode 0x5c
2733 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    /* 0x5c: POP rSP (or r12 with REX.B). */
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
}
2739
2740
2741/**
2742 * @opcode 0x5d
2743 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    /* 0x5d: POP rBP (or r13 with REX.B). */
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
2749
2750
2751/**
2752 * @opcode 0x5e
2753 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    /* 0x5e: POP rSI (or r14 with REX.B). */
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
2759
2760
2761/**
2762 * @opcode 0x5f
2763 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    /* 0x5f: POP rDI (or r15 with REX.B). */
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
2769
2770
2771/**
2772 * @opcode 0x60
2773 */
FNIEMOP_DEF(iemOp_pusha)
{
    /* 0x60: PUSHA/PUSHAD - push all GPRs; 80186+, invalid in 64-bit mode. */
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT); /* only 16/32-bit possible here (no 64-bit) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_32);
}
2784
2785
2786/**
2787 * @opcode 0x61
2788 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    /* 0x61: POPA/POPAD outside 64-bit mode; otherwise the (unsupported) MVEX prefix. */
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        /* Deferred to C implementation; all eight GPRs may be written. */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                          RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                        iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT); /* only 16/32-bit possible here */
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                      RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                    iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET(); /* raise #UD - MVEX decoding is not implemented */
}
2823
2824
2825/**
2826 * @opcode 0x62
2827 * @opmnemonic bound
2828 * @op1 Gv_RO
2829 * @op2 Ma
2830 * @opmincpu 80186
2831 * @ophints harmless x86_invalid_64
2832 * @optest op1=0 op2=0 ->
2833 * @optest op1=1 op2=0 -> value.xcpt=5
2834 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2835 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2836 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2837 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2838 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2839 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2840 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2841 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2842 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2843 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2844 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2845 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2846 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2847 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2848 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2849 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2850 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2851 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2852 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2853 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2854 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2855 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2856 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2857 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2858 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2859 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2860 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2861 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2862 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2863 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2864 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2865 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2866 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2867 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2868 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2869 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2870 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2871 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2872 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2873 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2874 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2875 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2876 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restrict wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /** @todo testcase: check that there are two memory accesses involved. Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                /* 16-bit: bounds are two consecutive words at [mem] and [mem+2]. */
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint16_t, u16Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                /* 32-bit: bounds are two consecutive dwords at [mem] and [mem+4]. */
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint32_t, u32Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        /* MOD=3 in legacy/compat mode: only valid as an EVEX prefix on AVX-512
           capable guest CPUs; otherwise #UD. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX prefix decoding: consume payload bytes 2 and 3, then bail -- the
       prefix itself is not implemented yet. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2964
2965
2966/**
2967 * @opcode 0x63
2968 * @opflmodify zf
2969 * @note non-64-bit modes.
2970 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();    /* ARPL is protected-mode only. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
        IEM_MC_ARG(uint16_t,        u16Src,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);

        /* Destination is r/m, source is reg (MR form); the assembly worker
           adjusts the RPL bits of the destination and sets ZF accordingly. */
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory */
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst,          0);
        IEM_MC_ARG(uint16_t,   u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,  bUnmapInfo);

        /* Map the destination word read/write, run the worker, then commit
           both the memory write and the EFLAGS. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3018
3019
3020/**
3021 * @opcode 0x63
3022 *
3023 * @note This is a weird one. It works like a regular move instruction if
3024 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
3025 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register.
             */
            /* Sign-extend the 32-bit source register into the 64-bit destination. */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory.
             */
            /* Sign-extend the 32-bit memory operand into the 64-bit destination. */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        /* NOTE(review): per the function doc, MOVSXD without REX.W should act
           like a regular 16/32-bit move (AMD docs); that path is not
           implemented here and asserts instead -- see @todo above. */
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
3067
3068
3069/**
3070 * @opcode 0x64
3071 * @opmnemonic segfs
3072 * @opmincpu 80386
3073 * @opgroup og_prefixes
3074 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    /* Record the segment-override prefix and re-dispatch on the next byte. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
3086
3087
3088/**
3089 * @opcode 0x65
3090 * @opmnemonic seggs
3091 * @opmincpu 80386
3092 * @opgroup og_prefixes
3093 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    /* Record the segment-override prefix and re-dispatch on the next byte. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
3105
3106
3107/**
3108 * @opcode 0x66
3109 * @opmnemonic opsize
3110 * @openc prefix
3111 * @opmincpu 80386
3112 * @ophints harmless
3113 * @opgroup og_prefixes
3114 */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    /* Record the prefix and recompute the effective operand size. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
3131
3132
3133/**
3134 * @opcode 0x67
3135 * @opmnemonic addrsize
3136 * @openc prefix
3137 * @opmincpu 80386
3138 * @ophints harmless
3139 * @opgroup og_prefixes
3140 */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    /* Record the prefix and toggle the effective address size relative to the
       default: 16<->32 in legacy/compat modes, 64->32 in long mode. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
3158
3159
3160/**
3161 * @opcode 0x68
3162 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* In 64-bit mode PUSH defaults to 64-bit operand size. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint16_t, u16Value, u16Imm);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint32_t, u32Value, u32Imm);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            /* The immediate is 32 bits and gets sign-extended to 64 bits. */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint64_t, u64Value, u64Imm);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3203
3204
3205/**
3206 * @opcode 0x69
3207 * @opflclass multiply
3208 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* SF, ZF, AF and PF are architecturally undefined after IMUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            /* Worker selected per target-CPU EFLAGS behaviour profile. */
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                /* Effective address first (hinting 2 trailing immediate bytes),
                   then the imm16, matching the instruction's byte layout. */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                /* imm32 sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* parameter count for the threaded function for this block. */

                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int32_t)u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3364
3365
3366/**
3367 * @opcode 0x6a
3368 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    /* The byte immediate is sign-extended to the effective operand size below. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* In 64-bit mode PUSH defaults to 64-bit operand size. */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_186, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint16_t, uValue, (int16_t)i8Imm);
            IEM_MC_PUSH_U16(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint32_t, uValue, (int32_t)i8Imm);
            IEM_MC_PUSH_U32(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint64_t, uValue, (int64_t)i8Imm);
            IEM_MC_PUSH_U64(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3405
3406
3407/**
3408 * @opcode 0x6b
3409 * @opflclass multiply
3410 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* SF, ZF, AF and PF are architecturally undefined after IMUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            /* Worker selected per target-CPU EFLAGS behaviour profile. */
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                /* Effective address first (hinting 1 trailing immediate byte),
                   then the sign-extended imm8, matching the byte layout. */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);

                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);

                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the threaded parameter count. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3569
3570
3571/**
3572 * @opcode 0x6c
3573 * @opfltest iopl,df
3574 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* INS is deferred to C implementations selected by REP prefix and
       effective address size; all variants do port I/O and may VM-exit.
       The REP variants additionally update xCX and need the REP flag. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3623
3624
3625/**
3626 * @opcode 0x6d
3627 * @opfltest iopl,df
3628 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Word/dword INS, deferred to C implementations selected by REP prefix,
       effective operand size and effective address size.  Note that 64-bit
       operand size falls through to the 32-bit workers (no 64-bit port I/O). */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3730
3731
3732/**
3733 * @opcode 0x6e
3734 * @opfltest iopl,df
3735 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    /*
     * outsb - output a byte string from iEffSeg:[e/rSI] to the port in DX.
     * Decoding only; the actual work is deferred to a C implementation
     * (iemCImpl_[rep_]outs_op8_addrNN) selected by REP prefix and address size.
     */
    IEMOP_HLP_MIN_186();                       /* instruction introduced with the 80186 */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Both F3 (REP) and F2 (REPNE) select the repeating variant here. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            /* The register mask advertises the GPRs the CImpl dirties:
               rSI always, plus rCX for the REP count. */
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Single-shot variant: only rSI is advanced by the CImpl. */
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3784
3785
3786/**
3787 * @opcode 0x6f
3788 * @opfltest iopl,df
3789 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    /*
     * outsw/outsd - output a word/dword string from iEffSeg:[e/rSI] to port DX.
     * Pure decode: dispatches on REP prefix, effective operand size and
     * effective address size to the matching iemCImpl_[rep_]outs_opNN_addrNN.
     */
    IEMOP_HLP_MIN_186();                       /* instruction introduced with the 80186 */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* F3 (REP) and F2 (REPNE) are treated alike and select the repeating variant. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    /* Register mask: the CImpl dirties rSI and, for REP, rCX. */
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            /* 64-bit operand size is handled as 32-bit: OUTSD has no 64-bit form. */
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Single-shot variant: only rSI is advanced by the CImpl. */
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            /* 64-bit operand size is handled as 32-bit (no 64-bit OUTS form). */
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3891
3892
3893/**
3894 * @opcode 0x70
3895 * @opfltest of
3896 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    /* jo rel8 - jump short if overflow (OF=1). */
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);        /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();            /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3912
3913
3914/**
3915 * @opcode 0x71
3916 * @opfltest of
3917 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    /* jno rel8 - jump short if not overflow (OF=0); inverse of 0x70, branch bodies swapped. */
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();            /* OF=1: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);        /* OF=0: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3933
3934/**
3935 * @opcode 0x72
3936 * @opfltest cf
3937 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    /* jc/jb/jnae rel8 - jump short if carry (CF=1). */
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);        /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();            /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3953
3954
3955/**
3956 * @opcode 0x73
3957 * @opfltest cf
3958 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    /* jnc/jnb/jae rel8 - jump short if not carry (CF=0). */
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();            /* CF=1: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);        /* CF=0: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3974
3975
3976/**
3977 * @opcode 0x74
3978 * @opfltest zf
3979 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    /* je/jz rel8 - jump short if equal/zero (ZF=1). */
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);        /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();            /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3995
3996
3997/**
3998 * @opcode 0x75
3999 * @opfltest zf
4000 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    /* jne/jnz rel8 - jump short if not equal/not zero (ZF=0). */
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();            /* ZF=1: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);        /* ZF=0: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4016
4017
4018/**
4019 * @opcode 0x76
4020 * @opfltest cf,zf
4021 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    /* jbe/jna rel8 - jump short if below or equal (CF=1 or ZF=1). */
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);        /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();            /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4037
4038
4039/**
4040 * @opcode 0x77
4041 * @opfltest cf,zf
4042 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    /* ja/jnbe rel8 - jump short if above (CF=0 and ZF=0). */
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();            /* CF or ZF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);        /* both clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4058
4059
4060/**
4061 * @opcode 0x78
4062 * @opfltest sf
4063 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    /* js rel8 - jump short if sign (SF=1). */
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);        /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();            /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4079
4080
4081/**
4082 * @opcode 0x79
4083 * @opfltest sf
4084 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    /* jns rel8 - jump short if not sign (SF=0). */
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();            /* SF=1: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);        /* SF=0: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4100
4101
4102/**
4103 * @opcode 0x7a
4104 * @opfltest pf
4105 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    /* jp/jpe rel8 - jump short if parity even (PF=1). */
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);        /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();            /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4121
4122
4123/**
4124 * @opcode 0x7b
4125 * @opfltest pf
4126 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    /* jnp/jpo rel8 - jump short if parity odd (PF=0). */
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();            /* PF=1: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);        /* PF=0: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4142
4143
4144/**
4145 * @opcode 0x7c
4146 * @opfltest sf,of
4147 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    /* jl/jnge rel8 - jump short if less, signed (SF != OF). */
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);        /* SF != OF: taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();            /* SF == OF: not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4163
4164
4165/**
4166 * @opcode 0x7d
4167 * @opfltest sf,of
4168 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    /* jnl/jge rel8 - jump short if greater or equal, signed (SF == OF). */
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();            /* SF != OF: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);        /* SF == OF: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4184
4185
4186/**
4187 * @opcode 0x7e
4188 * @opfltest zf,sf,of
4189 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    /* jle/jng rel8 - jump short if less or equal, signed (ZF=1 or SF != OF). */
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);        /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();            /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4205
4206
4207/**
4208 * @opcode 0x7f
4209 * @opfltest zf,sf,of
4210 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    /* jnle/jg rel8 - jump short if greater, signed (ZF=0 and SF == OF). */
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();            /* ZF=1 or SF != OF: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);        /* ZF=0 and SF == OF: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4226
4227
4228/**
4229 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4230 * iemOp_Grp1_Eb_Ib_80.
4231 */
/*
 * Eb,Ib read-write binary op body (add/or/adc/sbb/and/sub/xor).
 * a_InsNm             - instruction name fragment used to form the
 *                       iemAImpl_<nm>_u8[_locked] / iemNativeEmit_<nm>_r_i_efl names.
 * a_fRegNativeArchs   - arch mask for which the register form has a native emitter.
 * a_fMemNativeArchs   - reserved for the memory form (currently unused by the body).
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_InsNm, a_fRegNativeArchs, a_fMemNativeArchs) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        IEM_MC_BEGIN(3, 2, 0, 0); \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        /* Native path: fetch, emit the op inline, write back register + flags. */ \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_LOCAL(uint32_t, uEFlags); \
            IEM_MC_FETCH_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u8Dst, u8Imm, uEFlags, 8, 8); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Dst); \
            IEM_MC_COMMIT_EFLAGS(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            /* Fallback: call the assembly/C helper via reference arguments. */ \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            /* Unlocked: plain read-modify-write mapping of the destination byte. */ \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            /* The effective address is decoded before the trailing immediate byte. */ \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* LOCK prefix: atomic mapping and the _locked helper variant. */ \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8_locked), pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
4307
/*
 * Eb,Ib read-only binary op body (cmp): flags are updated but the destination
 * operand is never written, so the LOCK prefix is invalid (\#UD).
 * a_InsNm       - instruction name fragment for helper/emitter name construction.
 * a_fNativeArchs - arch mask for which a native emitter exists (both forms).
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_InsNm, a_fNativeArchs) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        IEM_MC_BEGIN(3, 2, 0, 0); \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        /* Native path: no IEM_MC_STORE_GREG - only EFLAGS are committed. */ \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_LOCAL(uint32_t, uEFlags); \
            IEM_MC_FETCH_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u8Dst, u8Imm, uEFlags, 8, 8); \
            IEM_MC_COMMIT_EFLAGS(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            /* Fallback: const reference since the destination is read-only. */ \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
            IEM_MC_REF_GREG_U8_CONST(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            /* Effective address first, then the trailing immediate byte. */ \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEMOP_HLP_DONE_DECODING(); \
            /* Native path can fetch the byte directly; no mapping needed for RO. */ \
            IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                IEM_MC_LOCAL(uint8_t, u8Dst); \
                IEM_MC_FETCH_MEM_U8(u8Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_LOCAL(uint32_t, uEFlags); \
                IEM_MC_FETCH_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u8Dst, u8Imm, uEFlags, 8, 8); \
                IEM_MC_COMMIT_EFLAGS(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                /* Fallback: read-only mapping + helper call. */ \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
                IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
                IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* LOCK with a read-only operation is invalid -> \#UD. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4371
4372
4373
4374/**
4375 * @opmaps grp1_80,grp1_83
4376 * @opcode /0
4377 * @opflclass arithmetic
4378 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
{
    /* 80 /0: add Eb,Ib - native register-form emitters on AMD64 and ARM64. */
    IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
4384
4385
4386/**
4387 * @opmaps grp1_80,grp1_83
4388 * @opcode /1
4389 * @opflclass logical
4390 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
{
    /* 80 /1: or Eb,Ib - no native emitters enabled (arch masks are 0). */
    IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(or, 0, 0);
}
4396
4397
4398/**
4399 * @opmaps grp1_80,grp1_83
4400 * @opcode /2
4401 * @opflclass arithmetic_carry
4402 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
{
    /* 80 /2: adc Eb,Ib - native register-form emitters on AMD64 and ARM64. */
    IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
4408
4409
4410/**
4411 * @opmaps grp1_80,grp1_83
4412 * @opcode /3
4413 * @opflclass arithmetic_carry
4414 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
{
    /* 80 /3: sbb Eb,Ib - native register-form emitters on AMD64 and ARM64. */
    IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
4420
4421
4422/**
4423 * @opmaps grp1_80,grp1_83
4424 * @opcode /4
4425 * @opflclass logical
4426 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
{
    /* 80 /4: and Eb,Ib - no native emitters enabled (arch masks are 0). */
    IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(and, 0, 0);
}
4432
4433
4434/**
4435 * @opmaps grp1_80,grp1_83
4436 * @opcode /5
4437 * @opflclass arithmetic
4438 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
{
    /* 80 /5: sub Eb,Ib - native register-form emitters on AMD64 and ARM64. */
    IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
4444
4445
4446/**
4447 * @opmaps grp1_80,grp1_83
4448 * @opcode /6
4449 * @opflclass logical
4450 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
{
    /* 80 /6: xor Eb,Ib - no native emitters enabled (arch masks are 0). */
    IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(xor, 0, 0);
}
4456
4457
4458/**
4459 * @opmaps grp1_80,grp1_83
4460 * @opcode /7
4461 * @opflclass arithmetic
4462 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
{
    /* 80 /7: cmp Eb,Ib - read-only body (no destination write, LOCK => #UD). */
    IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RO(cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
4468
4469
4470/**
4471 * @opcode 0x80
4472 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    /*
     * Group 1, opcode 0x80: the ModR/M reg field (bits 5:3) selects which of
     * the eight Eb,Ib ALU operations to decode.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* reg field is 3 bits, 0..7 is exhaustive */
    }
}
4489
4490
4491/**
4492 * Body for a group 1 binary operator.
4493 */
4494#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_InsNm, a_fRegNativeArchs, a_fMemNativeArchs) \
4495 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4496 { \
4497 /* register target */ \
4498 switch (pVCpu->iem.s.enmEffOpSize) \
4499 { \
4500 case IEMMODE_16BIT: \
4501 { \
4502 IEM_MC_BEGIN(3, 2, 0, 0); \
4503 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4505 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
4506 IEM_MC_LOCAL(uint16_t, u16Dst); \
4507 IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4508 IEM_MC_LOCAL(uint32_t, uEFlags); \
4509 IEM_MC_FETCH_EFLAGS(uEFlags); \
4510 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
4511 IEM_MC_COMMIT_EFLAGS(uEFlags); \
4512 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Dst); \
4513 } IEM_MC_NATIVE_ELSE() { \
4514 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4515 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4516 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4517 IEM_MC_REF_EFLAGS(pEFlags); \
4518 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4519 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
4520 } IEM_MC_NATIVE_ENDIF(); \
4521 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4522 IEM_MC_END(); \
4523 break; \
4524 } \
4525 \
4526 case IEMMODE_32BIT: \
4527 { \
4528 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0); \
4529 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4531 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
4532 IEM_MC_LOCAL(uint32_t, u32Dst); \
4533 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4534 IEM_MC_LOCAL(uint32_t, uEFlags); \
4535 IEM_MC_FETCH_EFLAGS(uEFlags); \
4536 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
4537 IEM_MC_COMMIT_EFLAGS(uEFlags); \
4538 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst); \
4539 } IEM_MC_NATIVE_ELSE() { \
4540 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4541 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4542 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4543 IEM_MC_REF_EFLAGS(pEFlags); \
4544 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4545 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
4546 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4547 } IEM_MC_NATIVE_ENDIF(); \
4548 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4549 IEM_MC_END(); \
4550 break; \
4551 } \
4552 \
4553 case IEMMODE_64BIT: \
4554 { \
4555 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0); \
4556 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4558 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
4559 IEM_MC_LOCAL(uint64_t, u64Dst); \
4560 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4561 IEM_MC_LOCAL(uint32_t, uEFlags); \
4562 IEM_MC_FETCH_EFLAGS(uEFlags); \
4563 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
4564 IEM_MC_COMMIT_EFLAGS(uEFlags); \
4565 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst); \
4566 } IEM_MC_NATIVE_ELSE() { \
4567 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4568 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4569 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4570 IEM_MC_REF_EFLAGS(pEFlags); \
4571 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4572 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
4573 } IEM_MC_NATIVE_ENDIF(); \
4574 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4575 IEM_MC_END(); \
4576 break; \
4577 } \
4578 \
4579 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4580 } \
4581 } \
4582 else \
4583 { \
4584 /* memory target */ \
4585 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4586 { \
4587 switch (pVCpu->iem.s.enmEffOpSize) \
4588 { \
4589 case IEMMODE_16BIT: \
4590 { \
4591 IEM_MC_BEGIN(3, 3, 0, 0); \
4592 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4593 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4594 \
4595 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4596 IEMOP_HLP_DONE_DECODING(); \
4597 \
4598 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4599 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4600 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4601 \
4602 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4603 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4604 IEM_MC_FETCH_EFLAGS(EFlags); \
4605 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
4606 \
4607 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4608 IEM_MC_COMMIT_EFLAGS(EFlags); \
4609 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4610 IEM_MC_END(); \
4611 break; \
4612 } \
4613 \
4614 case IEMMODE_32BIT: \
4615 { \
4616 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4617 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4618 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4619 \
4620 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4621 IEMOP_HLP_DONE_DECODING(); \
4622 \
4623 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4624 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4625 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4626 \
4627 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4628 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4629 IEM_MC_FETCH_EFLAGS(EFlags); \
4630 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
4631 \
4632 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4633 IEM_MC_COMMIT_EFLAGS(EFlags); \
4634 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4635 IEM_MC_END(); \
4636 break; \
4637 } \
4638 \
4639 case IEMMODE_64BIT: \
4640 { \
4641 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4642 \
4643 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4644 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4645 \
4646 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4647 IEMOP_HLP_DONE_DECODING(); \
4648 \
4649 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4650 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4651 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4652 \
4653 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4654 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4655 IEM_MC_FETCH_EFLAGS(EFlags); \
4656 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
4657 \
4658 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4659 IEM_MC_COMMIT_EFLAGS(EFlags); \
4660 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4661 IEM_MC_END(); \
4662 break; \
4663 } \
4664 \
4665 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4666 } \
4667 } \
4668 else \
4669 { \
4670 switch (pVCpu->iem.s.enmEffOpSize) \
4671 { \
4672 case IEMMODE_16BIT: \
4673 { \
4674 IEM_MC_BEGIN(3, 3, 0, 0); \
4675 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4676 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4677 \
4678 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4679 IEMOP_HLP_DONE_DECODING(); \
4680 \
4681 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4682 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4683 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4684 \
4685 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4686 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4687 IEM_MC_FETCH_EFLAGS(EFlags); \
4688 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16_locked), pu16Dst, u16Src, pEFlags); \
4689 \
4690 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4691 IEM_MC_COMMIT_EFLAGS(EFlags); \
4692 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4693 IEM_MC_END(); \
4694 break; \
4695 } \
4696 \
4697 case IEMMODE_32BIT: \
4698 { \
4699 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4700 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4701 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4702 \
4703 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4704 IEMOP_HLP_DONE_DECODING(); \
4705 \
4706 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4707 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4708 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4709 \
4710 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4711 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4712 IEM_MC_FETCH_EFLAGS(EFlags); \
4713 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32_locked), pu32Dst, u32Src, pEFlags); \
4714 \
4715 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4716 IEM_MC_COMMIT_EFLAGS(EFlags); \
4717 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4718 IEM_MC_END(); \
4719 break; \
4720 } \
4721 \
4722 case IEMMODE_64BIT: \
4723 { \
4724 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4725 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4726 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4727 \
4728 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4729 IEMOP_HLP_DONE_DECODING(); \
4730 \
4731 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4732 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4733 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4734 \
4735 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4736 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4737 IEM_MC_FETCH_EFLAGS(EFlags); \
4738 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64_locked), pu64Dst, u64Src, pEFlags); \
4739 \
4740 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4741 IEM_MC_COMMIT_EFLAGS(EFlags); \
4742 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4743 IEM_MC_END(); \
4744 break; \
4745 } \
4746 \
4747 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4748 } \
4749 } \
4750 } \
4751 (void)0
4752
/**
 * Read-only body for group 1 instructions (currently only CMP) taking a
 * word/dword immediate operand (Ev,Iz), dispatched via iemOp_Grp1_Ev_Iz.
 *
 * The destination operand is only read, never written back: register
 * destinations are referenced const, memory destinations are mapped
 * read-only, and a LOCK prefix raises an invalid-lock-prefix exception
 * (final else branch).  With a 64-bit effective operand size the immediate
 * is 32 bits sign-extended to 64 (IEM_OPCODE_GET_NEXT_S32_SX_U64).
 *
 * @param   a_InsNm         The instruction's short name; used to form the
 *                          iemAImpl_<nm>_uXX fallback worker names and the
 *                          iemNativeEmit_<nm>_r_i_efl native emitter names.
 * @param   a_fNativeArchs  Mask of host architectures for which native
 *                          (recompiler) emitters exist (IEM_MC_NATIVE_IF).
 */
#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_InsNm, a_fNativeArchs) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                IEM_MC_BEGIN(3, 2, 0, 0); \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint16_t, u16Dst); \
                    IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint16_t const *,pu16Dst, 0); \
                    IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0); \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint32_t, u32Dst); \
                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint32_t const *,pu32Dst, 0); \
                    IEM_MC_REF_GREG_U32_CONST (pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0); \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint64_t, u64Dst); \
                    IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint64_t const *,pu64Dst, 0); \
                    IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint16_t, u16Dst); \
                        IEM_MC_FETCH_MEM_U16(u16Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint32_t, uEFlags); \
                        IEM_MC_FETCH_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
                        IEM_MC_COMMIT_EFLAGS(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                        IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                        IEM_MC_FETCH_EFLAGS(EFlags); \
                        IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
                        IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(EFlags); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint32_t, u32Dst); \
                        IEM_MC_FETCH_MEM_U32(u32Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint32_t, uEFlags); \
                        IEM_MC_FETCH_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
                        IEM_MC_COMMIT_EFLAGS(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                        IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                        IEM_MC_FETCH_EFLAGS(EFlags); \
                        IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
                        IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(EFlags); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint64_t, u64Dst); \
                        IEM_MC_FETCH_MEM_U64(u64Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint32_t, uEFlags); \
                        IEM_MC_FETCH_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
                        IEM_MC_COMMIT_EFLAGS(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                        IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                        IEM_MC_FETCH_EFLAGS(EFlags); \
                        IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
                        IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(EFlags); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4945
4946
/**
 * @opmaps grp1_81
 * @opcode /0
 * @opflclass arithmetic
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
    /* Read-write body; Iz is imm16/imm32 (sign-extended for 64-bit operand size).
       Native-emitter arch mask: AMD64 + ARM64. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
4957
4958
/**
 * @opmaps grp1_81
 * @opcode /1
 * @opflclass logical
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
    /* Read-write body; arch masks are 0, so no native emitter is selected here. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(or, 0, 0);
}
4969
4970
/**
 * @opmaps grp1_81
 * @opcode /2
 * @opflclass arithmetic_carry
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
    /* Add-with-carry consumes CF in addition to producing flags (arithmetic_carry).
       Native-emitter arch mask: AMD64 + ARM64. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
4981
4982
/**
 * @opmaps grp1_81
 * @opcode /3
 * @opflclass arithmetic_carry
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
    /* Subtract-with-borrow consumes CF (arithmetic_carry).
       Native-emitter arch mask: AMD64 + ARM64. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
4993
4994
/**
 * @opmaps grp1_81
 * @opcode /4
 * @opflclass logical
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
    /* Read-write body; arch masks are 0, so no native emitter is selected here. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(and, 0, 0);
}
5005
5006
/**
 * @opmaps grp1_81
 * @opcode /5
 * @opflclass arithmetic
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
    /* Read-write body; Iz is imm16/imm32 (sign-extended for 64-bit operand size).
       Native-emitter arch mask: AMD64 + ARM64. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
5017
5018
/**
 * @opmaps grp1_81
 * @opcode /6
 * @opflclass logical
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
    /* Read-write body; arch masks are 0, so no native emitter is selected here. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(xor, 0, 0);
}
5029
5030
/**
 * @opmaps grp1_81
 * @opcode /7
 * @opflclass arithmetic
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
    /* CMP only reads its destination, so the read-only body is used
       (it also rejects the LOCK prefix).  Native emitters: AMD64 + ARM64. */
    IEMOP_BODY_BINARY_Ev_Iz_RO(cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
5041
5042
/**
 * @opcode 0x81
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    /* Group 1 with a word/dword immediate: the ModR/M reg field (/0../7)
       selects which of the eight binary operations to perform. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5062
5063
/**
 * @opcode 0x82
 * @opmnemonic grp1_82
 * @opgroup og_groups
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    /* Opcode 0x82 is an alias of the 0x80 group (Eb,Ib) that is invalid in
       64-bit mode; after the mode check it simply forwards to the 0x80 body. */
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
5074
5075
/**
 * Body for group 1 instructions (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Ev_Ib.  Read-write destination.
 *
 * The byte immediate is sign-extended to the effective operand size (see the
 * (uintXX_t)(intXX_t)(int8_t) casts) before being handed to the
 * iemAImpl_<nm>_uXX worker.  Memory destinations honour the LOCK prefix by
 * taking the second switch: atomic mapping plus the _locked worker variant.
 *
 * @param   a_InsNm             The instruction's short name, used to form the
 *                              iemAImpl_<nm>_uXX[_locked] worker names.
 * @param   a_fRegNativeArchs   Register-form native-emitter arch mask; not
 *                              referenced by this body (all current callers
 *                              pass 0) - presumably reserved for a future
 *                              native-emitter path, TODO confirm.
 * @param   a_fMemNativeArchs   Memory-form native-emitter arch mask; likewise
 *                              unreferenced here.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_InsNm, a_fRegNativeArchs, a_fMemNativeArchs) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16_locked), pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32_locked), pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64_locked), pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
5297
/**
 * Read-only variant of IEMOP_BODY_BINARY_Ev_Ib_RW (used for CMP): the
 * destination is only read, so register destinations are referenced const,
 * memory destinations are mapped read-only, and a LOCK prefix raises an
 * invalid-lock-prefix exception (final else branch).
 *
 * @param   a_InsNm         The instruction's short name, used to form the
 *                          iemAImpl_<nm>_uXX worker names.
 * @param   a_fNativeArchs  Accepted but not referenced by this body -
 *                          presumably reserved for native emitters, TODO
 *                          confirm.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_InsNm, a_fNativeArchs) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
5444
/**
 * @opmaps grp1_83
 * @opcode /0
 * @opflclass arithmetic
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
    /* Byte immediate is sign-extended to the operand size by the body macro. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(add, 0, 0);
}
5455
5456
/**
 * @opmaps grp1_83
 * @opcode /1
 * @opflclass logical
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
    /* Byte immediate is sign-extended to the operand size by the body macro. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(or, 0, 0);
}
5467
5468
/**
 * @opmaps grp1_83
 * @opcode /2
 * @opflclass arithmetic_carry
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
    /* Add-with-carry: consumes CF in addition to producing flags. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(adc, 0, 0);
}
5479
5480
/**
 * @opmaps grp1_83
 * @opcode /3
 * @opflclass arithmetic_carry
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
    /* Subtract-with-borrow: consumes CF in addition to producing flags. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(sbb, 0, 0);
}
5491
5492
/**
 * @opmaps grp1_83
 * @opcode /4
 * @opflclass logical
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
    /* Byte immediate is sign-extended to the operand size by the body macro. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(and, 0, 0);
}
5503
5504
/**
 * @opmaps grp1_83
 * @opcode /5
 * @opflclass arithmetic
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
    /* Byte immediate is sign-extended to the operand size by the body macro. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(sub, 0, 0);
}
5515
5516
/**
 * @opmaps grp1_83
 * @opcode /6
 * @opflclass logical
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
    /* Byte immediate is sign-extended to the operand size by the body macro. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(xor, 0, 0);
}
5527
5528
/**
 * @opmaps grp1_83
 * @opcode /7
 * @opflclass arithmetic
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
    /* CMP only reads its destination, so the read-only body is used
       (it also rejects the LOCK prefix). */
    IEMOP_BODY_BINARY_Ev_Ib_RO(cmp, 0);
}
5539
5540
/**
 * @opcode 0x83
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    /* Group 1 with a sign-extended byte immediate: the ModR/M reg field
       (/0../7) selects which of the eight binary operations to perform. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5563
5564
/**
 * @opcode 0x84
 * @opflclass logical
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Deal with special case of 'test rN, rN' which is frequently used for testing for zero/non-zero registers.
     * This block only makes a differences when emitting native code, where we'll save a register fetch.
     * The condition checks that mod == 3 (register form) and reg == r/m, including the REX bits.
     */
    if (   (bRm >> X86_MODRM_REG_SHIFT) == ((bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT))
        && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB)
    {
        IEM_MC_BEGIN(3, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
            /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */
            IEM_MC_LOCAL(uint32_t, uEFlags);
            IEM_MC_FETCH_EFLAGS(uEFlags);
            /* Same register as both operands: single fetch feeds the native emitter twice. */
            IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u8Src, u8Src, uEFlags, 8);
            IEM_MC_COMMIT_EFLAGS(uEFlags);
        } IEM_MC_NATIVE_ELSE() {
            IEM_MC_ARG(uint8_t *, pu8Dst, 0);
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* == IEM_GET_MODRM_RM(pVCpu, bRm) */
            IEM_MC_ARG(uint32_t *, pEFlags, 2);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        } IEM_MC_NATIVE_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /* General case: shared read-only Eb,Gb body (TEST never writes its destination).
       NOTE(review): presumably the MC block above finishes the instruction so
       only non-matching encodings fall through to here - confirm IEM_MC_END
       semantics. */
    IEMOP_BODY_BINARY_rm_r8_RO(bRm, iemAImpl_test_u8, test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
5606
5607
/**
 * @opcode 0x85
 * @opflclass logical
 *
 * TEST Ev,Gv - ANDs the two operands to set EFLAGS, discarding the result.
 * AF is undefined after TEST, hence the verification hint below.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by the hardware. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Deal with special case of 'test rN, rN' which is frequently used for testing for zero/non-zero registers.
     * This block only makes a difference when emitting native code, where we'll save a register fetch.
     */
    /* The first comparison folds the mod==3 (register form) check and the
       reg==r/m check into one; the REX extension bits (uRexReg/uRexB) must
       also match for both fields to name the same register. */
    if (   (bRm >> X86_MODRM_REG_SHIFT) == ((bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT))
        && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB)
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */
                    IEM_MC_LOCAL(uint32_t, uEFlags);
                    IEM_MC_FETCH_EFLAGS(uEFlags);
                    /* Native path: emit the test directly, no assembly helper call. */
                    IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u16Src, u16Src, uEFlags, 16);
                    IEM_MC_COMMIT_EFLAGS(uEFlags);
                } IEM_MC_NATIVE_ELSE() {
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                    IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* == IEM_GET_MODRM_RM(pVCpu, bRm) */
                    IEM_MC_ARG(uint32_t *, pEFlags, 2);
                    IEM_MC_REF_EFLAGS(pEFlags);
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                } IEM_MC_NATIVE_ENDIF();
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */
                    IEM_MC_LOCAL(uint32_t, uEFlags);
                    IEM_MC_FETCH_EFLAGS(uEFlags);
                    IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u32Src, u32Src, uEFlags, 32);
                    IEM_MC_COMMIT_EFLAGS(uEFlags);
                } IEM_MC_NATIVE_ELSE() {
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                    IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* == IEM_GET_MODRM_RM(pVCpu, bRm) */
                    IEM_MC_ARG(uint32_t *, pEFlags, 2);
                    IEM_MC_REF_EFLAGS(pEFlags);
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                } IEM_MC_NATIVE_ENDIF();
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */
                    IEM_MC_LOCAL(uint32_t, uEFlags);
                    IEM_MC_FETCH_EFLAGS(uEFlags);
                    IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u64Src, u64Src, uEFlags, 64);
                    IEM_MC_COMMIT_EFLAGS(uEFlags);
                } IEM_MC_NATIVE_ELSE() {
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                    IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* == IEM_GET_MODRM_RM(pVCpu, bRm) */
                    IEM_MC_ARG(uint32_t *, pEFlags, 2);
                    IEM_MC_REF_EFLAGS(pEFlags);
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                } IEM_MC_NATIVE_ENDIF();
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /* General case, including the memory forms (read-only destination access). */
    IEMOP_BODY_BINARY_rm_rv_RO(bRm, test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
5700
5701
/**
 * @opcode 0x86
 *
 * XCHG Eb,Gb - exchanges a byte register with a byte register or memory
 * operand.  The memory form uses an atomic worker unless the execution mode
 * says lock prefixes are to be disregarded (IEM_F_X86_DISREGARD_LOCK).
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register <-> register: fetch both, then store them crosswise. */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/* Shared body for the atomic and plain (unlocked) memory variants; a_Style
   selects the matching IEM_MC_MEM_MAP_U8_*/unmap macro pair. */
#define IEMOP_XCHG_BYTE(a_fnWorker, a_Style) \
            IEM_MC_BEGIN(2, 4, 0, 0); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
            IEM_MC_LOCAL(uint8_t,  uTmpReg); \
            IEM_MC_ARG(uint8_t *,  pu8Mem,           0); \
            IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
            IEM_MC_MEM_MAP_U8_##a_Style(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker, pu8Mem, pu8Reg); \
            IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Style(bUnmapInfo); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END()

        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_locked,ATOMIC);
        }
        else
        {
            /* Lock prefixes are disregarded in this execution mode, use the plain worker. */
            IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_unlocked,RW);
        }
    }
}
5762
5763
/**
 * @opcode 0x87
 *
 * XCHG Ev,Gv - exchanges a word/dword/qword register with a register or
 * memory operand.  The memory form uses an atomic worker unless the execution
 * mode says lock prefixes are to be disregarded (IEM_F_X86_DISREGARD_LOCK).
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register <-> register: fetch both, then store them crosswise. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
/* Shared body for the atomic and plain (unlocked) memory variants; a_Type
   selects the matching IEM_MC_MEM_MAP_*/unmap macro pair per operand size. */
#define IEMOP_XCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
            do { \
                switch (pVCpu->iem.s.enmEffOpSize) \
                { \
                    case IEMMODE_16BIT: \
                        IEM_MC_BEGIN(2, 4, 0, 0); \
                        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                        IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                        IEM_MC_LOCAL(uint16_t, uTmpReg); \
                        IEM_MC_ARG(uint16_t *, pu16Mem, 0); \
                        IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1); \
                        \
                        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                        IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
                        IEM_MC_MEM_MAP_U16_##a_Type(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                        IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker16, pu16Mem, pu16Reg); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                        IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
                        \
                        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                        IEM_MC_END(); \
                        break; \
                        \
                    case IEMMODE_32BIT: \
                        IEM_MC_BEGIN(2, 4, IEM_MC_F_MIN_386, 0); \
                        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                        IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                        IEM_MC_LOCAL(uint32_t, uTmpReg); \
                        IEM_MC_ARG(uint32_t *, pu32Mem, 0); \
                        IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1); \
                        \
                        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                        IEMOP_HLP_DONE_DECODING(); \
                        IEM_MC_MEM_MAP_U32_##a_Type(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                        IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker32, pu32Mem, pu32Reg); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
                        \
                        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                        IEM_MC_END(); \
                        break; \
                        \
                    case IEMMODE_64BIT: \
                        IEM_MC_BEGIN(2, 4, IEM_MC_F_64BIT, 0); \
                        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                        IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                        IEM_MC_LOCAL(uint64_t, uTmpReg); \
                        IEM_MC_ARG(uint64_t *, pu64Mem, 0); \
                        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1); \
                        \
                        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                        IEMOP_HLP_DONE_DECODING(); \
                        IEM_MC_MEM_MAP_U64_##a_Type(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                        IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker64, pu64Mem, pu64Reg); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
                        \
                        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                        IEM_MC_END(); \
                        break; \
                        \
                    IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
                } \
            } while (0)
        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_locked, iemAImpl_xchg_u32_locked, iemAImpl_xchg_u64_locked,ATOMIC);
        }
        else
        {
            /* Lock prefixes are disregarded in this execution mode, use the plain workers. */
            IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_unlocked, iemAImpl_xchg_u32_unlocked, iemAImpl_xchg_u64_unlocked,RW);
        }
    }
}
5909
5910
/**
 * @opcode 0x88
 *
 * MOV Eb,Gb - stores a byte register into a byte register or memory operand.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: plain register-to-register copy. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5950
5951
/**
 * @opcode 0x89
 *
 * MOV Ev,Gv - stores a word/dword/qword register into a register or memory
 * operand, dispatching on the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: plain register-to-register copy. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6048
6049
/**
 * @opcode 0x8a
 *
 * MOV Gb,Eb - loads a byte register from a byte register or memory operand.
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register source: plain register-to-register copy. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6088
6089
/**
 * @opcode 0x8b
 *
 * MOV Gv,Ev - loads a word/dword/qword register from a register or memory
 * operand, dispatching on the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register source: plain register-to-register copy. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6186
6187
6188/**
6189 * opcode 0x63
6190 * @todo Table fixme
6191 */
6192FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
6193{
6194 if (!IEM_IS_64BIT_CODE(pVCpu))
6195 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
6196 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6197 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
6198 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
6199}
6200
6201
/**
 * @opcode 0x8c
 *
 * MOV Ev,Sw - stores a segment register into a general register or memory
 * operand.  The memory form is always a word store regardless of operand
 * size; the register form respects the operand size (zero extending).
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm); /* 3-bit reg field, REX.R not applied. */
    if (iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg); /* zero-extended fetch */
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg); /* zero-extended fetch */
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're saving the register to memory. The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6280
6281
6282
6283
/**
 * @opcode 0x8d
 *
 * LEA Gv,M - stores the effective address of the memory operand in a general
 * register; the register form (mod=3) raises \#UD.
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /** @todo optimize: This value casting/masking can be skipped if addr-size ==
             *        operand-size, which is usually the case. It'll save an instruction
             *        and a register. */
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc); /* truncate the address to 16 bits */
            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /** @todo optimize: This value casting/masking can be skipped if addr-size ==
             *        operand-size, which is usually the case. It'll save an instruction
             *        and a register. */
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc); /* truncate the address to 32 bits */
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            /* 64-bit: the effective address can be stored as-is, no cast needed. */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6339
6340
/**
 * @opcode 0x8e
 *
 * MOV Sw,Ev - loads a segment register from a general register or memory
 * operand via the iemCImpl_load_SReg C implementation.  CS is not a valid
 * destination.  Loading SS additionally inhibits interrupt shadowing.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm); /* 3-bit reg field, REX.R not applied. */
    /** @todo r=bird: What does 8086 do here wrt CS? */
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     *
     * Note! Using IEMOP_MOV_SW_EV_REG_BODY here to specify different
     *       IEM_CIMPL_F_XXX values depending on the CPU mode and target
     *       register.  This is a restriction of the current recompiler
     *       approach.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
/* Shared register-form body; the CIMPL flags vary with the target sreg/mode,
   and the clobber mask covers all four hidden parts of the segment register. */
#define IEMOP_MOV_SW_EV_REG_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 0, 0, a_fCImplFlags); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t,      u16Value,          1); \
            IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                  RT_BIT_64(kIemNativeGstReg_SegSelFirst    + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
                                iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        if (iSegReg == X86_SREG_SS)
        {
            /* Loading SS inhibits interrupts for one instruction; in 32-bit
               code it can also switch stack mode, hence IEM_CIMPL_F_MODE. */
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_REG_BODY(0);
        }
        else
        {
            /* DS/ES in 32-bit code may affect the effective mode. */
            IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_REG_BODY
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#define IEMOP_MOV_SW_EV_MEM_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 1, 0, a_fCImplFlags); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t,      u16Value,          1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                  RT_BIT_64(kIemNativeGstReg_SegSelFirst    + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
                                iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        if (iSegReg == X86_SREG_SS)
        {
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_MEM_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_MEM_BODY
    }
}
6456
6457
/** Opcode 0x8f /0.
 *
 * POP Ev - pops the top of the stack into a register or memory operand.
 * @param   bRm     The ModR/M byte, already fetched by the caller (group 1A
 *                  dispatcher).
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignoring it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calculations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            /* The '2 << 8' biases the address calculation by the operand size,
               accounting for the pre-incremented rSP (see above). */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR     GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Only commit rSP once both the pop and the memory store succeeded. */
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
6589
6590
6591/**
6592 * @opcode 0x8f
6593 */
6594FNIEMOP_DEF(iemOp_Grp1A__xop)
6595{
6596 /*
6597 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
6598 * three byte VEX prefix, except that the mmmmm field cannot have the values
6599 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
6600 */
6601 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6602 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
6603 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
6604
6605 IEMOP_MNEMONIC(xop, "xop");
6606 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
6607 {
6608 /** @todo Test when exctly the XOP conformance checks kick in during
6609 * instruction decoding and fetching (using \#PF). */
6610 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
6611 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6612 if ( ( pVCpu->iem.s.fPrefixes
6613 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
6614 == 0)
6615 {
6616 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
6617 if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
6618 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
6619 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
6620 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
6621 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
6622 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
6623 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
6624 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
6625
6626 /** @todo XOP: Just use new tables and decoders. */
6627 switch (bRm & 0x1f)
6628 {
6629 case 8: /* xop opcode map 8. */
6630 IEMOP_BITCH_ABOUT_STUB();
6631 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6632
6633 case 9: /* xop opcode map 9. */
6634 IEMOP_BITCH_ABOUT_STUB();
6635 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6636
6637 case 10: /* xop opcode map 10. */
6638 IEMOP_BITCH_ABOUT_STUB();
6639 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6640
6641 default:
6642 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
6643 IEMOP_RAISE_INVALID_OPCODE_RET();
6644 }
6645 }
6646 else
6647 Log(("XOP: Invalid prefix mix!\n"));
6648 }
6649 else
6650 Log(("XOP: XOP support disabled!\n"));
6651 IEMOP_RAISE_INVALID_OPCODE_RET();
6652}
6653
6654
6655/**
6656 * Common 'xchg reg,rAX' helper.
6657 */
6658FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
6659{
6660 iReg |= pVCpu->iem.s.uRexB;
6661 switch (pVCpu->iem.s.enmEffOpSize)
6662 {
6663 case IEMMODE_16BIT:
6664 IEM_MC_BEGIN(0, 2, 0, 0);
6665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6666 IEM_MC_LOCAL(uint16_t, u16Tmp1);
6667 IEM_MC_LOCAL(uint16_t, u16Tmp2);
6668 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
6669 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
6670 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
6671 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
6672 IEM_MC_ADVANCE_RIP_AND_FINISH();
6673 IEM_MC_END();
6674 break;
6675
6676 case IEMMODE_32BIT:
6677 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
6678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6679 IEM_MC_LOCAL(uint32_t, u32Tmp1);
6680 IEM_MC_LOCAL(uint32_t, u32Tmp2);
6681 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
6682 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
6683 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
6684 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
6685 IEM_MC_ADVANCE_RIP_AND_FINISH();
6686 IEM_MC_END();
6687 break;
6688
6689 case IEMMODE_64BIT:
6690 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6692 IEM_MC_LOCAL(uint64_t, u64Tmp1);
6693 IEM_MC_LOCAL(uint64_t, u64Tmp2);
6694 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
6695 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
6696 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
6697 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
6698 IEM_MC_ADVANCE_RIP_AND_FINISH();
6699 IEM_MC_END();
6700 break;
6701
6702 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6703 }
6704}
6705
6706
6707/**
6708 * @opcode 0x90
6709 */
6710FNIEMOP_DEF(iemOp_nop)
6711{
6712 /* R8/R8D and RAX/EAX can be exchanged. */
6713 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
6714 {
6715 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
6716 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
6717 }
6718
6719 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
6720 {
6721 IEMOP_MNEMONIC(pause, "pause");
6722 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
6723 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
6724 if (!IEM_IS_IN_GUEST(pVCpu))
6725 { /* probable */ }
6726#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6727 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
6728 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmx_pause);
6729#endif
6730#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
6731 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
6732 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_svm_pause);
6733#endif
6734 }
6735 else
6736 IEMOP_MNEMONIC(nop, "nop");
6737 /** @todo testcase: lock nop; lock pause */
6738 IEM_MC_BEGIN(0, 0, 0, 0);
6739 IEMOP_HLP_DONE_DECODING();
6740 IEM_MC_ADVANCE_RIP_AND_FINISH();
6741 IEM_MC_END();
6742}
6743
6744
6745/**
6746 * @opcode 0x91
6747 */
6748FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
6749{
6750 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
6751 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
6752}
6753
6754
6755/**
6756 * @opcode 0x92
6757 */
6758FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
6759{
6760 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
6761 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
6762}
6763
6764
6765/**
6766 * @opcode 0x93
6767 */
6768FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
6769{
6770 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
6771 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
6772}
6773
6774
6775/**
6776 * @opcode 0x94
6777 */
6778FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6779{
6780 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6781 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6782}
6783
6784
6785/**
6786 * @opcode 0x95
6787 */
6788FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
6789{
6790 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
6791 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
6792}
6793
6794
6795/**
6796 * @opcode 0x96
6797 */
6798FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
6799{
6800 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
6801 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
6802}
6803
6804
6805/**
6806 * @opcode 0x97
6807 */
6808FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
6809{
6810 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
6811 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
6812}
6813
6814
6815/**
6816 * @opcode 0x98
6817 */
6818FNIEMOP_DEF(iemOp_cbw)
6819{
6820 switch (pVCpu->iem.s.enmEffOpSize)
6821 {
6822 case IEMMODE_16BIT:
6823 IEMOP_MNEMONIC(cbw, "cbw");
6824 IEM_MC_BEGIN(0, 1, 0, 0);
6825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6826 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
6827 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
6828 } IEM_MC_ELSE() {
6829 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
6830 } IEM_MC_ENDIF();
6831 IEM_MC_ADVANCE_RIP_AND_FINISH();
6832 IEM_MC_END();
6833 break;
6834
6835 case IEMMODE_32BIT:
6836 IEMOP_MNEMONIC(cwde, "cwde");
6837 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6839 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6840 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
6841 } IEM_MC_ELSE() {
6842 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
6843 } IEM_MC_ENDIF();
6844 IEM_MC_ADVANCE_RIP_AND_FINISH();
6845 IEM_MC_END();
6846 break;
6847
6848 case IEMMODE_64BIT:
6849 IEMOP_MNEMONIC(cdqe, "cdqe");
6850 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6852 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6853 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
6854 } IEM_MC_ELSE() {
6855 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
6856 } IEM_MC_ENDIF();
6857 IEM_MC_ADVANCE_RIP_AND_FINISH();
6858 IEM_MC_END();
6859 break;
6860
6861 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6862 }
6863}
6864
6865
6866/**
6867 * @opcode 0x99
6868 */
6869FNIEMOP_DEF(iemOp_cwd)
6870{
6871 switch (pVCpu->iem.s.enmEffOpSize)
6872 {
6873 case IEMMODE_16BIT:
6874 IEMOP_MNEMONIC(cwd, "cwd");
6875 IEM_MC_BEGIN(0, 1, 0, 0);
6876 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6877 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6878 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
6879 } IEM_MC_ELSE() {
6880 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
6881 } IEM_MC_ENDIF();
6882 IEM_MC_ADVANCE_RIP_AND_FINISH();
6883 IEM_MC_END();
6884 break;
6885
6886 case IEMMODE_32BIT:
6887 IEMOP_MNEMONIC(cdq, "cdq");
6888 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6890 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6891 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
6892 } IEM_MC_ELSE() {
6893 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
6894 } IEM_MC_ENDIF();
6895 IEM_MC_ADVANCE_RIP_AND_FINISH();
6896 IEM_MC_END();
6897 break;
6898
6899 case IEMMODE_64BIT:
6900 IEMOP_MNEMONIC(cqo, "cqo");
6901 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6903 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
6904 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
6905 } IEM_MC_ELSE() {
6906 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
6907 } IEM_MC_ENDIF();
6908 IEM_MC_ADVANCE_RIP_AND_FINISH();
6909 IEM_MC_END();
6910 break;
6911
6912 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6913 }
6914}
6915
6916
6917/**
6918 * @opcode 0x9a
6919 */
6920FNIEMOP_DEF(iemOp_call_Ap)
6921{
6922 IEMOP_MNEMONIC(call_Ap, "call Ap");
6923 IEMOP_HLP_NO_64BIT();
6924
6925 /* Decode the far pointer address and pass it on to the far call C implementation. */
6926 uint32_t off32Seg;
6927 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
6928 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
6929 else
6930 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
6931 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
6932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6933 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
6934 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
6935 iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
6936 /** @todo make task-switches, ring-switches, ++ return non-zero status */
6937}
6938
6939
/** Opcode 0x9b. (aka fwait)
 * Checks for pending FPU exceptions / device-not-available before advancing. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6951
6952
6953/**
6954 * @opcode 0x9c
6955 */
6956FNIEMOP_DEF(iemOp_pushf_Fv)
6957{
6958 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
6959 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6960 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6961 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
6962 iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
6963}
6964
6965
6966/**
6967 * @opcode 0x9d
6968 */
6969FNIEMOP_DEF(iemOp_popf_Fv)
6970{
6971 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
6972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6973 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6974 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
6975 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
6976 iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
6977}
6978
6979
6980/**
6981 * @opcode 0x9e
6982 * @opflmodify cf,pf,af,zf,sf
6983 */
6984FNIEMOP_DEF(iemOp_sahf)
6985{
6986 IEMOP_MNEMONIC(sahf, "sahf");
6987 if ( IEM_IS_64BIT_CODE(pVCpu)
6988 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6989 IEMOP_RAISE_INVALID_OPCODE_RET();
6990 IEM_MC_BEGIN(0, 2, 0, 0);
6991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6992 IEM_MC_LOCAL(uint32_t, u32Flags);
6993 IEM_MC_LOCAL(uint32_t, EFlags);
6994 IEM_MC_FETCH_EFLAGS(EFlags);
6995 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
6996 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6997 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
6998 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
6999 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
7000 IEM_MC_COMMIT_EFLAGS(EFlags);
7001 IEM_MC_ADVANCE_RIP_AND_FINISH();
7002 IEM_MC_END();
7003}
7004
7005
7006/**
7007 * @opcode 0x9f
7008 * @opfltest cf,pf,af,zf,sf
7009 */
7010FNIEMOP_DEF(iemOp_lahf)
7011{
7012 IEMOP_MNEMONIC(lahf, "lahf");
7013 if ( IEM_IS_64BIT_CODE(pVCpu)
7014 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
7015 IEMOP_RAISE_INVALID_OPCODE_RET();
7016 IEM_MC_BEGIN(0, 1, 0, 0);
7017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7018 IEM_MC_LOCAL(uint8_t, u8Flags);
7019 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
7020 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
7021 IEM_MC_ADVANCE_RIP_AND_FINISH();
7022 IEM_MC_END();
7023}
7024
7025
7026/**
7027 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
7028 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
7029 * Will return/throw on failures.
7030 * @param a_GCPtrMemOff The variable to store the offset in.
7031 */
7032#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
7033 do \
7034 { \
7035 switch (pVCpu->iem.s.enmEffAddrMode) \
7036 { \
7037 case IEMMODE_16BIT: \
7038 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
7039 break; \
7040 case IEMMODE_32BIT: \
7041 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
7042 break; \
7043 case IEMMODE_64BIT: \
7044 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
7045 break; \
7046 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
7047 } \
7048 } while (0)
7049
7050/**
7051 * @opcode 0xa0
7052 */
7053FNIEMOP_DEF(iemOp_mov_AL_Ob)
7054{
7055 /*
7056 * Get the offset.
7057 */
7058 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
7059 RTGCPTR GCPtrMemOffDecode;
7060 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
7061
7062 /*
7063 * Fetch AL.
7064 */
7065 IEM_MC_BEGIN(0, 2, 0, 0);
7066 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7067 IEM_MC_LOCAL(uint8_t, u8Tmp);
7068 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7069 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
7070 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
7071 IEM_MC_ADVANCE_RIP_AND_FINISH();
7072 IEM_MC_END();
7073}
7074
7075
7076/**
7077 * @opcode 0xa1
7078 */
7079FNIEMOP_DEF(iemOp_mov_rAX_Ov)
7080{
7081 /*
7082 * Get the offset.
7083 */
7084 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
7085 RTGCPTR GCPtrMemOffDecode;
7086 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
7087
7088 /*
7089 * Fetch rAX.
7090 */
7091 switch (pVCpu->iem.s.enmEffOpSize)
7092 {
7093 case IEMMODE_16BIT:
7094 IEM_MC_BEGIN(0, 2, 0, 0);
7095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7096 IEM_MC_LOCAL(uint16_t, u16Tmp);
7097 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7098 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
7099 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
7100 IEM_MC_ADVANCE_RIP_AND_FINISH();
7101 IEM_MC_END();
7102 break;
7103
7104 case IEMMODE_32BIT:
7105 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
7106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7107 IEM_MC_LOCAL(uint32_t, u32Tmp);
7108 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7109 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
7110 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
7111 IEM_MC_ADVANCE_RIP_AND_FINISH();
7112 IEM_MC_END();
7113 break;
7114
7115 case IEMMODE_64BIT:
7116 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
7117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7118 IEM_MC_LOCAL(uint64_t, u64Tmp);
7119 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7120 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
7121 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
7122 IEM_MC_ADVANCE_RIP_AND_FINISH();
7123 IEM_MC_END();
7124 break;
7125
7126 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7127 }
7128}
7129
7130
7131/**
7132 * @opcode 0xa2
7133 */
7134FNIEMOP_DEF(iemOp_mov_Ob_AL)
7135{
7136 /*
7137 * Get the offset.
7138 */
7139 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
7140 RTGCPTR GCPtrMemOffDecode;
7141 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
7142
7143 /*
7144 * Store AL.
7145 */
7146 IEM_MC_BEGIN(0, 2, 0, 0);
7147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7148 IEM_MC_LOCAL(uint8_t, u8Tmp);
7149 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
7150 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7151 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
7152 IEM_MC_ADVANCE_RIP_AND_FINISH();
7153 IEM_MC_END();
7154}
7155
7156
7157/**
7158 * @opcode 0xa3
7159 */
7160FNIEMOP_DEF(iemOp_mov_Ov_rAX)
7161{
7162 /*
7163 * Get the offset.
7164 */
7165 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
7166 RTGCPTR GCPtrMemOffDecode;
7167 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
7168
7169 /*
7170 * Store rAX.
7171 */
7172 switch (pVCpu->iem.s.enmEffOpSize)
7173 {
7174 case IEMMODE_16BIT:
7175 IEM_MC_BEGIN(0, 2, 0, 0);
7176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7177 IEM_MC_LOCAL(uint16_t, u16Tmp);
7178 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
7179 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7180 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
7181 IEM_MC_ADVANCE_RIP_AND_FINISH();
7182 IEM_MC_END();
7183 break;
7184
7185 case IEMMODE_32BIT:
7186 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
7187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7188 IEM_MC_LOCAL(uint32_t, u32Tmp);
7189 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
7190 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7191 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
7192 IEM_MC_ADVANCE_RIP_AND_FINISH();
7193 IEM_MC_END();
7194 break;
7195
7196 case IEMMODE_64BIT:
7197 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
7198 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7199 IEM_MC_LOCAL(uint64_t, u64Tmp);
7200 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
7201 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7202 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
7203 IEM_MC_ADVANCE_RIP_AND_FINISH();
7204 IEM_MC_END();
7205 break;
7206
7207 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7208 }
7209}
7210
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Emits the non-rep movs microcode: loads from iEffSeg:[rSI], stores to
 * ES:[rDI], then advances (DF clear) or retreats (DF set) both index
 * registers by the element size. */
#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
7230
7231/**
7232 * @opcode 0xa4
7233 * @opfltest df
7234 */
7235FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
7236{
7237 /*
7238 * Use the C implementation if a repeat prefix is encountered.
7239 */
7240 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7241 {
7242 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
7243 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7244 switch (pVCpu->iem.s.enmEffAddrMode)
7245 {
7246 case IEMMODE_16BIT:
7247 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7248 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7249 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7250 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7251 iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
7252 case IEMMODE_32BIT:
7253 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7254 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7255 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7256 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7257 iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
7258 case IEMMODE_64BIT:
7259 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7260 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7261 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7262 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7263 iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
7264 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7265 }
7266 }
7267
7268 /*
7269 * Sharing case implementation with movs[wdq] below.
7270 */
7271 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
7272 switch (pVCpu->iem.s.enmEffAddrMode)
7273 {
7274 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7275 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7276 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
7277 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7278 }
7279}
7280
7281
7282/**
7283 * @opcode 0xa5
7284 * @opfltest df
7285 */
7286FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
7287{
7288
7289 /*
7290 * Use the C implementation if a repeat prefix is encountered.
7291 */
7292 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7293 {
7294 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
7295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7296 switch (pVCpu->iem.s.enmEffOpSize)
7297 {
7298 case IEMMODE_16BIT:
7299 switch (pVCpu->iem.s.enmEffAddrMode)
7300 {
7301 case IEMMODE_16BIT:
7302 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7303 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7304 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7305 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7306 iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
7307 case IEMMODE_32BIT:
7308 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7309 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7310 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7311 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7312 iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
7313 case IEMMODE_64BIT:
7314 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7315 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7316 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7317 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7318 iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
7319 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7320 }
7321 break;
7322 case IEMMODE_32BIT:
7323 switch (pVCpu->iem.s.enmEffAddrMode)
7324 {
7325 case IEMMODE_16BIT:
7326 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7327 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7328 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7329 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7330 iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
7331 case IEMMODE_32BIT:
7332 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7333 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7334 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7335 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7336 iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
7337 case IEMMODE_64BIT:
7338 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7339 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7340 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7341 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7342 iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
7343 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7344 }
7345 case IEMMODE_64BIT:
7346 switch (pVCpu->iem.s.enmEffAddrMode)
7347 {
7348 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
7349 case IEMMODE_32BIT:
7350 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7351 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7352 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7353 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7354 iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
7355 case IEMMODE_64BIT:
7356 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7357 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7358 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7359 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7360 iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
7361 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7362 }
7363 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7364 }
7365 }
7366
7367 /*
7368 * Annoying double switch here.
7369 * Using ugly macro for implementing the cases, sharing it with movsb.
7370 */
7371 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
7372 switch (pVCpu->iem.s.enmEffOpSize)
7373 {
7374 case IEMMODE_16BIT:
7375 switch (pVCpu->iem.s.enmEffAddrMode)
7376 {
7377 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7378 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7379 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
7380 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7381 }
7382 break;
7383
7384 case IEMMODE_32BIT:
7385 switch (pVCpu->iem.s.enmEffAddrMode)
7386 {
7387 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7388 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7389 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
7390 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7391 }
7392 break;
7393
7394 case IEMMODE_64BIT:
7395 switch (pVCpu->iem.s.enmEffAddrMode)
7396 {
7397 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7398 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
7399 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
7400 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7401 }
7402 break;
7403 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7404 }
7405}
7406
7407#undef IEM_MOVS_CASE
7408
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 * Emits the non-rep cmps microcode: compares iEffSeg:[rSI] (first operand)
 * against ES:[rDI] via iemAImpl_cmp_uNN (updating EFLAGS, discarding the
 * subtraction result), then advances/retreats both index registers per DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(3, 3, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        \
        IEM_MC_LOCAL(RTGCPTR, uAddr1); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr1, X86_GREG_xSI); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr1); \
        \
        IEM_MC_LOCAL(RTGCPTR, uAddr2); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr2, X86_GREG_xDI); \
        IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr2); \
        \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_ARG_LOCAL_REF(uint##ValBits##_t *, puValue1, uValue1, 0); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
7438
7439/**
7440 * @opcode 0xa6
7441 * @opflclass arithmetic
7442 * @opfltest df
7443 */
7444FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
7445{
7446
7447 /*
7448 * Use the C implementation if a repeat prefix is encountered.
7449 */
7450 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7451 {
7452 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
7453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7454 switch (pVCpu->iem.s.enmEffAddrMode)
7455 {
7456 case IEMMODE_16BIT:
7457 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7458 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7459 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7460 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7461 iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
7462 case IEMMODE_32BIT:
7463 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7464 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7465 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7466 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7467 iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
7468 case IEMMODE_64BIT:
7469 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7470 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7471 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7472 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7473 iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
7474 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7475 }
7476 }
7477 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7478 {
7479 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
7480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7481 switch (pVCpu->iem.s.enmEffAddrMode)
7482 {
7483 case IEMMODE_16BIT:
7484 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7485 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7486 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7487 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7488 iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
7489 case IEMMODE_32BIT:
7490 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7491 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7492 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7493 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7494 iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
7495 case IEMMODE_64BIT:
7496 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7497 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7498 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7499 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7500 iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
7501 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7502 }
7503 }
7504
7505 /*
7506 * Sharing case implementation with cmps[wdq] below.
7507 */
7508 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
7509 switch (pVCpu->iem.s.enmEffAddrMode)
7510 {
7511 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7512 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7513 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
7514 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7515 }
7516}
7517
7518
7519/**
7520 * @opcode 0xa7
7521 * @opflclass arithmetic
7522 * @opfltest df
7523 */
7524FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
7525{
7526 /*
7527 * Use the C implementation if a repeat prefix is encountered.
7528 */
7529 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7530 {
7531 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
7532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7533 switch (pVCpu->iem.s.enmEffOpSize)
7534 {
7535 case IEMMODE_16BIT:
7536 switch (pVCpu->iem.s.enmEffAddrMode)
7537 {
7538 case IEMMODE_16BIT:
7539 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7540 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7541 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7542 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7543 iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7544 case IEMMODE_32BIT:
7545 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7546 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7547 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7548 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7549 iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7550 case IEMMODE_64BIT:
7551 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7552 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7553 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7554 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7555 iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7556 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7557 }
7558 break;
7559 case IEMMODE_32BIT:
7560 switch (pVCpu->iem.s.enmEffAddrMode)
7561 {
7562 case IEMMODE_16BIT:
7563 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7564 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7565 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7566 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7567 iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7568 case IEMMODE_32BIT:
7569 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7570 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7571 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7572 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7573 iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7574 case IEMMODE_64BIT:
7575 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7576 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7577 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7578 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7579 iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7580 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7581 }
7582 case IEMMODE_64BIT:
7583 switch (pVCpu->iem.s.enmEffAddrMode)
7584 {
7585 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
7586 case IEMMODE_32BIT:
7587 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7588 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7589 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7590 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7591 iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7592 case IEMMODE_64BIT:
7593 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7594 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7595 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7596 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7597 iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7598 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7599 }
7600 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7601 }
7602 }
7603
7604 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7605 {
7606 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
7607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7608 switch (pVCpu->iem.s.enmEffOpSize)
7609 {
7610 case IEMMODE_16BIT:
7611 switch (pVCpu->iem.s.enmEffAddrMode)
7612 {
7613 case IEMMODE_16BIT:
7614 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7615 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7616 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7617 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7618 iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7619 case IEMMODE_32BIT:
7620 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7621 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7622 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7623 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7624 iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7625 case IEMMODE_64BIT:
7626 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7627 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7628 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7629 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7630 iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7631 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7632 }
7633 break;
7634 case IEMMODE_32BIT:
7635 switch (pVCpu->iem.s.enmEffAddrMode)
7636 {
7637 case IEMMODE_16BIT:
7638 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7639 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7640 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7641 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7642 iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7643 case IEMMODE_32BIT:
7644 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7645 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7646 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7647 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7648 iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7649 case IEMMODE_64BIT:
7650 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7651 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7652 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7653 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7654 iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7655 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7656 }
7657 case IEMMODE_64BIT:
7658 switch (pVCpu->iem.s.enmEffAddrMode)
7659 {
7660 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
7661 case IEMMODE_32BIT:
7662 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7663 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7664 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7665 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7666 iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7667 case IEMMODE_64BIT:
7668 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7669 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7670 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7671 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7672 iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7673 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7674 }
7675 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7676 }
7677 }
7678
7679 /*
7680 * Annoying double switch here.
7681 * Using ugly macro for implementing the cases, sharing it with cmpsb.
7682 */
7683 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
7684 switch (pVCpu->iem.s.enmEffOpSize)
7685 {
7686 case IEMMODE_16BIT:
7687 switch (pVCpu->iem.s.enmEffAddrMode)
7688 {
7689 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7690 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7691 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
7692 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7693 }
7694 break;
7695
7696 case IEMMODE_32BIT:
7697 switch (pVCpu->iem.s.enmEffAddrMode)
7698 {
7699 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7700 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7701 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
7702 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7703 }
7704 break;
7705
7706 case IEMMODE_64BIT:
7707 switch (pVCpu->iem.s.enmEffAddrMode)
7708 {
7709 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7710 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break;
7711 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
7712 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7713 }
7714 break;
7715 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7716 }
7717}
7718
7719#undef IEM_CMPS_CASE
7720
7721/**
7722 * @opcode 0xa8
7723 * @opflclass logical
7724 */
7725FNIEMOP_DEF(iemOp_test_AL_Ib)
7726{
7727 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
7728 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7729 IEMOP_BODY_BINARY_AL_Ib(test, 0);
7730}
7731
7732
7733/**
7734 * @opcode 0xa9
7735 * @opflclass logical
7736 */
7737FNIEMOP_DEF(iemOp_test_eAX_Iz)
7738{
7739 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
7740 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7741 IEMOP_BODY_BINARY_rAX_Iz_RO(test, 0);
7742}
7743
7744
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits one MC block for a non-repeated STOS: stores the low ValBits of rAX
 * to ES:[rDI] and then advances (DF=0) or retreats (DF=1) rDI by ValBits/8,
 * using AddrBits-wide arithmetic on rDI.  a_fMcFlags carries the CPU/mode
 * restrictions for the recompiler. */
#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
7761
7762/**
7763 * @opcode 0xaa
7764 */
7765FNIEMOP_DEF(iemOp_stosb_Yb_AL)
7766{
7767 /*
7768 * Use the C implementation if a repeat prefix is encountered.
7769 */
7770 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7771 {
7772 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
7773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7774 switch (pVCpu->iem.s.enmEffAddrMode)
7775 {
7776 case IEMMODE_16BIT:
7777 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7778 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7779 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7780 iemCImpl_stos_al_m16);
7781 case IEMMODE_32BIT:
7782 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7783 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7784 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7785 iemCImpl_stos_al_m32);
7786 case IEMMODE_64BIT:
7787 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7788 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7789 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7790 iemCImpl_stos_al_m64);
7791 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7792 }
7793 }
7794
7795 /*
7796 * Sharing case implementation with stos[wdq] below.
7797 */
7798 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
7799 switch (pVCpu->iem.s.enmEffAddrMode)
7800 {
7801 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7802 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7803 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
7804 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7805 }
7806}
7807
7808
7809/**
7810 * @opcode 0xab
7811 */
7812FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
7813{
7814 /*
7815 * Use the C implementation if a repeat prefix is encountered.
7816 */
7817 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7818 {
7819 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
7820 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7821 switch (pVCpu->iem.s.enmEffOpSize)
7822 {
7823 case IEMMODE_16BIT:
7824 switch (pVCpu->iem.s.enmEffAddrMode)
7825 {
7826 case IEMMODE_16BIT:
7827 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7828 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7829 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7830 iemCImpl_stos_ax_m16);
7831 case IEMMODE_32BIT:
7832 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7833 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7834 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7835 iemCImpl_stos_ax_m32);
7836 case IEMMODE_64BIT:
7837 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7838 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7839 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7840 iemCImpl_stos_ax_m64);
7841 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7842 }
7843 break;
7844 case IEMMODE_32BIT:
7845 switch (pVCpu->iem.s.enmEffAddrMode)
7846 {
7847 case IEMMODE_16BIT:
7848 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7849 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7850 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7851 iemCImpl_stos_eax_m16);
7852 case IEMMODE_32BIT:
7853 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7854 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7855 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7856 iemCImpl_stos_eax_m32);
7857 case IEMMODE_64BIT:
7858 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7859 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7860 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7861 iemCImpl_stos_eax_m64);
7862 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7863 }
7864 case IEMMODE_64BIT:
7865 switch (pVCpu->iem.s.enmEffAddrMode)
7866 {
7867 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7868 case IEMMODE_32BIT:
7869 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7870 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7871 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7872 iemCImpl_stos_rax_m32);
7873 case IEMMODE_64BIT:
7874 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7875 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7876 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7877 iemCImpl_stos_rax_m64);
7878 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7879 }
7880 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7881 }
7882 }
7883
7884 /*
7885 * Annoying double switch here.
7886 * Using ugly macro for implementing the cases, sharing it with stosb.
7887 */
7888 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7889 switch (pVCpu->iem.s.enmEffOpSize)
7890 {
7891 case IEMMODE_16BIT:
7892 switch (pVCpu->iem.s.enmEffAddrMode)
7893 {
7894 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7895 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7896 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
7897 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7898 }
7899 break;
7900
7901 case IEMMODE_32BIT:
7902 switch (pVCpu->iem.s.enmEffAddrMode)
7903 {
7904 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7905 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7906 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
7907 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7908 }
7909 break;
7910
7911 case IEMMODE_64BIT:
7912 switch (pVCpu->iem.s.enmEffAddrMode)
7913 {
7914 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7915 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
7916 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
7917 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7918 }
7919 break;
7920 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7921 }
7922}
7923
7924#undef IEM_STOS_CASE
7925
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits one MC block for a non-repeated LODS: loads ValBits from
 * iEffSeg:[rSI] into the low ValBits of rAX and then advances (DF=0) or
 * retreats (DF=1) rSI by ValBits/8, using AddrBits-wide arithmetic on rSI.
 * a_fMcFlags carries the CPU/mode restrictions for the recompiler. */
#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
7942
7943/**
7944 * @opcode 0xac
7945 * @opfltest df
7946 */
7947FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
7948{
7949 /*
7950 * Use the C implementation if a repeat prefix is encountered.
7951 */
7952 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7953 {
7954 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
7955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7956 switch (pVCpu->iem.s.enmEffAddrMode)
7957 {
7958 case IEMMODE_16BIT:
7959 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7960 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7961 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7962 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7963 iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
7964 case IEMMODE_32BIT:
7965 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7966 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7967 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7968 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7969 iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
7970 case IEMMODE_64BIT:
7971 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7972 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7973 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7974 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7975 iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
7976 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7977 }
7978 }
7979
7980 /*
7981 * Sharing case implementation with stos[wdq] below.
7982 */
7983 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
7984 switch (pVCpu->iem.s.enmEffAddrMode)
7985 {
7986 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7987 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7988 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
7989 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7990 }
7991}
7992
7993
7994/**
7995 * @opcode 0xad
7996 * @opfltest df
7997 */
7998FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
7999{
8000 /*
8001 * Use the C implementation if a repeat prefix is encountered.
8002 */
8003 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
8004 {
8005 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
8006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8007 switch (pVCpu->iem.s.enmEffOpSize)
8008 {
8009 case IEMMODE_16BIT:
8010 switch (pVCpu->iem.s.enmEffAddrMode)
8011 {
8012 case IEMMODE_16BIT:
8013 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8014 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8015 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8016 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8017 iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
8018 case IEMMODE_32BIT:
8019 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8020 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8021 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8022 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8023 iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
8024 case IEMMODE_64BIT:
8025 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8026 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8027 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8028 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8029 iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
8030 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8031 }
8032 break;
8033 case IEMMODE_32BIT:
8034 switch (pVCpu->iem.s.enmEffAddrMode)
8035 {
8036 case IEMMODE_16BIT:
8037 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8038 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8039 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8040 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8041 iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
8042 case IEMMODE_32BIT:
8043 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8044 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8045 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8046 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8047 iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
8048 case IEMMODE_64BIT:
8049 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8050 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8051 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8052 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8053 iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
8054 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8055 }
8056 case IEMMODE_64BIT:
8057 switch (pVCpu->iem.s.enmEffAddrMode)
8058 {
8059 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
8060 case IEMMODE_32BIT:
8061 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8062 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8063 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8064 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8065 iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
8066 case IEMMODE_64BIT:
8067 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
8068 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8069 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
8070 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8071 iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
8072 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8073 }
8074 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8075 }
8076 }
8077
8078 /*
8079 * Annoying double switch here.
8080 * Using ugly macro for implementing the cases, sharing it with lodsb.
8081 */
8082 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
8083 switch (pVCpu->iem.s.enmEffOpSize)
8084 {
8085 case IEMMODE_16BIT:
8086 switch (pVCpu->iem.s.enmEffAddrMode)
8087 {
8088 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
8089 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
8090 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
8091 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8092 }
8093 break;
8094
8095 case IEMMODE_32BIT:
8096 switch (pVCpu->iem.s.enmEffAddrMode)
8097 {
8098 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
8099 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
8100 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
8101 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8102 }
8103 break;
8104
8105 case IEMMODE_64BIT:
8106 switch (pVCpu->iem.s.enmEffAddrMode)
8107 {
8108 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
8109 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
8110 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
8111 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8112 }
8113 break;
8114 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8115 }
8116}
8117
8118#undef IEM_LODS_CASE
8119
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits one MC block for a non-repeated SCAS: compares the low ValBits of
 * rAX against ES:[rDI] via iemAImpl_cmp_uN (updating EFLAGS, rAX untouched)
 * and then advances (DF=0) or retreats (DF=1) rDI by ValBits/8, using
 * AddrBits-wide arithmetic on rDI.  a_fMcFlags carries the CPU/mode
 * restrictions for the recompiler. */
#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(3, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END();
8142
8143/**
8144 * @opcode 0xae
8145 * @opflclass arithmetic
8146 * @opfltest df
8147 */
8148FNIEMOP_DEF(iemOp_scasb_AL_Xb)
8149{
8150 /*
8151 * Use the C implementation if a repeat prefix is encountered.
8152 */
8153 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
8154 {
8155 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
8156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8157 switch (pVCpu->iem.s.enmEffAddrMode)
8158 {
8159 case IEMMODE_16BIT:
8160 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8161 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8162 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8163 iemCImpl_repe_scas_al_m16);
8164 case IEMMODE_32BIT:
8165 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8166 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8167 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8168 iemCImpl_repe_scas_al_m32);
8169 case IEMMODE_64BIT:
8170 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8171 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8172 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8173 iemCImpl_repe_scas_al_m64);
8174 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8175 }
8176 }
8177 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
8178 {
8179 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
8180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8181 switch (pVCpu->iem.s.enmEffAddrMode)
8182 {
8183 case IEMMODE_16BIT:
8184 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8185 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8186 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8187 iemCImpl_repne_scas_al_m16);
8188 case IEMMODE_32BIT:
8189 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8190 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8191 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8192 iemCImpl_repne_scas_al_m32);
8193 case IEMMODE_64BIT:
8194 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8195 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8196 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8197 iemCImpl_repne_scas_al_m64);
8198 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8199 }
8200 }
8201
8202 /*
8203 * Sharing case implementation with stos[wdq] below.
8204 */
8205 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
8206 switch (pVCpu->iem.s.enmEffAddrMode)
8207 {
8208 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
8209 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
8210 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
8211 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8212 }
8213}
8214
8215
8216/**
8217 * @opcode 0xaf
8218 * @opflclass arithmetic
8219 * @opfltest df
8220 */
8221FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
8222{
8223 /*
8224 * Use the C implementation if a repeat prefix is encountered.
8225 */
8226 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
8227 {
8228 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
8229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8230 switch (pVCpu->iem.s.enmEffOpSize)
8231 {
8232 case IEMMODE_16BIT:
8233 switch (pVCpu->iem.s.enmEffAddrMode)
8234 {
8235 case IEMMODE_16BIT:
8236 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8237 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8238 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8239 iemCImpl_repe_scas_ax_m16);
8240 case IEMMODE_32BIT:
8241 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8242 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8243 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8244 iemCImpl_repe_scas_ax_m32);
8245 case IEMMODE_64BIT:
8246 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8247 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8248 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8249 iemCImpl_repe_scas_ax_m64);
8250 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8251 }
8252 break;
8253 case IEMMODE_32BIT:
8254 switch (pVCpu->iem.s.enmEffAddrMode)
8255 {
8256 case IEMMODE_16BIT:
8257 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8258 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8259 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8260 iemCImpl_repe_scas_eax_m16);
8261 case IEMMODE_32BIT:
8262 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8263 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8264 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8265 iemCImpl_repe_scas_eax_m32);
8266 case IEMMODE_64BIT:
8267 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8268 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8269 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8270 iemCImpl_repe_scas_eax_m64);
8271 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8272 }
8273 case IEMMODE_64BIT:
8274 switch (pVCpu->iem.s.enmEffAddrMode)
8275 {
8276 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
8277 case IEMMODE_32BIT:
8278 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8279 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8280 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8281 iemCImpl_repe_scas_rax_m32);
8282 case IEMMODE_64BIT:
8283 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8284 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8285 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8286 iemCImpl_repe_scas_rax_m64);
8287 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8288 }
8289 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8290 }
8291 }
8292 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
8293 {
8294 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
8295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8296 switch (pVCpu->iem.s.enmEffOpSize)
8297 {
8298 case IEMMODE_16BIT:
8299 switch (pVCpu->iem.s.enmEffAddrMode)
8300 {
8301 case IEMMODE_16BIT:
8302 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8303 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8304 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8305 iemCImpl_repne_scas_ax_m16);
8306 case IEMMODE_32BIT:
8307 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8308 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8309 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8310 iemCImpl_repne_scas_ax_m32);
8311 case IEMMODE_64BIT:
8312 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8313 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8314 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8315 iemCImpl_repne_scas_ax_m64);
8316 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8317 }
8318 break;
8319 case IEMMODE_32BIT:
8320 switch (pVCpu->iem.s.enmEffAddrMode)
8321 {
8322 case IEMMODE_16BIT:
8323 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8324 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8325 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8326 iemCImpl_repne_scas_eax_m16);
8327 case IEMMODE_32BIT:
8328 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8329 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8330 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8331 iemCImpl_repne_scas_eax_m32);
8332 case IEMMODE_64BIT:
8333 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8334 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8335 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8336 iemCImpl_repne_scas_eax_m64);
8337 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8338 }
8339 case IEMMODE_64BIT:
8340 switch (pVCpu->iem.s.enmEffAddrMode)
8341 {
8342 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
8343 case IEMMODE_32BIT:
8344 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8345 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8346 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8347 iemCImpl_repne_scas_rax_m32);
8348 case IEMMODE_64BIT:
8349 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8350 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8351 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8352 iemCImpl_repne_scas_rax_m64);
8353 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8354 }
8355 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8356 }
8357 }
8358
8359 /*
8360 * Annoying double switch here.
8361 * Using ugly macro for implementing the cases, sharing it with scasb.
8362 */
8363 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
8364 switch (pVCpu->iem.s.enmEffOpSize)
8365 {
8366 case IEMMODE_16BIT:
8367 switch (pVCpu->iem.s.enmEffAddrMode)
8368 {
8369 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
8370 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
8371 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
8372 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8373 }
8374 break;
8375
8376 case IEMMODE_32BIT:
8377 switch (pVCpu->iem.s.enmEffAddrMode)
8378 {
8379 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
8380 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
8381 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
8382 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8383 }
8384 break;
8385
8386 case IEMMODE_64BIT:
8387 switch (pVCpu->iem.s.enmEffAddrMode)
8388 {
8389 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
8390 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
8391 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
8392 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8393 }
8394 break;
8395 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8396 }
8397}
8398
8399#undef IEM_SCAS_CASE
8400
8401/**
8402 * Common 'mov r8, imm8' helper.
8403 */
8404FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
8405{
8406 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8407 IEM_MC_BEGIN(0, 0, 0, 0);
8408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8409 IEM_MC_STORE_GREG_U8_CONST(iFixedReg, u8Imm);
8410 IEM_MC_ADVANCE_RIP_AND_FINISH();
8411 IEM_MC_END();
8412}
8413
8414
8415/**
8416 * @opcode 0xb0
8417 */
8418FNIEMOP_DEF(iemOp_mov_AL_Ib)
8419{
8420 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
8421 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8422}
8423
8424
8425/**
8426 * @opcode 0xb1
8427 */
8428FNIEMOP_DEF(iemOp_CL_Ib)
8429{
8430 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
8431 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8432}
8433
8434
8435/**
8436 * @opcode 0xb2
8437 */
8438FNIEMOP_DEF(iemOp_DL_Ib)
8439{
8440 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
8441 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8442}
8443
8444
8445/**
8446 * @opcode 0xb3
8447 */
8448FNIEMOP_DEF(iemOp_BL_Ib)
8449{
8450 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
8451 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8452}
8453
8454
8455/**
8456 * @opcode 0xb4
8457 */
8458FNIEMOP_DEF(iemOp_mov_AH_Ib)
8459{
8460 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
8461 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8462}
8463
8464
8465/**
8466 * @opcode 0xb5
8467 */
8468FNIEMOP_DEF(iemOp_CH_Ib)
8469{
8470 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
8471 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8472}
8473
8474
8475/**
8476 * @opcode 0xb6
8477 */
8478FNIEMOP_DEF(iemOp_DH_Ib)
8479{
8480 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
8481 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8482}
8483
8484
8485/**
8486 * @opcode 0xb7
8487 */
8488FNIEMOP_DEF(iemOp_BH_Ib)
8489{
8490 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
8491 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8492}
8493
8494
8495/**
8496 * Common 'mov regX,immX' helper.
8497 */
8498FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
8499{
8500 switch (pVCpu->iem.s.enmEffOpSize)
8501 {
8502 case IEMMODE_16BIT:
8503 IEM_MC_BEGIN(0, 0, 0, 0);
8504 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8506 IEM_MC_STORE_GREG_U16_CONST(iFixedReg, u16Imm);
8507 IEM_MC_ADVANCE_RIP_AND_FINISH();
8508 IEM_MC_END();
8509 break;
8510
8511 case IEMMODE_32BIT:
8512 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8513 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8515 IEM_MC_STORE_GREG_U32_CONST(iFixedReg, u32Imm);
8516 IEM_MC_ADVANCE_RIP_AND_FINISH();
8517 IEM_MC_END();
8518 break;
8519
8520 case IEMMODE_64BIT:
8521 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
8522 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
8523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8524 IEM_MC_STORE_GREG_U64_CONST(iFixedReg, u64Imm);
8525 IEM_MC_ADVANCE_RIP_AND_FINISH();
8526 IEM_MC_END();
8527 break;
8528 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8529 }
8530}
8531
8532
8533/**
8534 * @opcode 0xb8
8535 */
8536FNIEMOP_DEF(iemOp_eAX_Iv)
8537{
8538 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
8539 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8540}
8541
8542
8543/**
8544 * @opcode 0xb9
8545 */
8546FNIEMOP_DEF(iemOp_eCX_Iv)
8547{
8548 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
8549 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8550}
8551
8552
8553/**
8554 * @opcode 0xba
8555 */
8556FNIEMOP_DEF(iemOp_eDX_Iv)
8557{
8558 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
8559 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8560}
8561
8562
8563/**
8564 * @opcode 0xbb
8565 */
8566FNIEMOP_DEF(iemOp_eBX_Iv)
8567{
8568 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
8569 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8570}
8571
8572
8573/**
8574 * @opcode 0xbc
8575 */
8576FNIEMOP_DEF(iemOp_eSP_Iv)
8577{
8578 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
8579 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8580}
8581
8582
8583/**
8584 * @opcode 0xbd
8585 */
8586FNIEMOP_DEF(iemOp_eBP_Iv)
8587{
8588 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
8589 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8590}
8591
8592
8593/**
8594 * @opcode 0xbe
8595 */
8596FNIEMOP_DEF(iemOp_eSI_Iv)
8597{
8598 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
8599 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8600}
8601
8602
8603/**
8604 * @opcode 0xbf
8605 */
8606FNIEMOP_DEF(iemOp_eDI_Iv)
8607{
8608 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
8609 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8610}
8611
8612
8613/**
8614 * @opcode 0xc0
8615 */
8616FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
8617{
8618 IEMOP_HLP_MIN_186();
8619 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8620
8621 /* Need to use a body macro here since the EFLAGS behaviour differs between
8622 the shifts, rotates and rotate w/ carry. Sigh. */
8623#define GRP2_BODY_Eb_Ib(a_pImplExpr) \
8624 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
8625 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8626 { \
8627 /* register */ \
8628 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8629 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0); \
8630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8631 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
8632 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8633 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8634 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8635 IEM_MC_REF_EFLAGS(pEFlags); \
8636 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
8637 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8638 IEM_MC_END(); \
8639 } \
8640 else \
8641 { \
8642 /* memory */ \
8643 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_186, 0); \
8644 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
8646 \
8647 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8649 \
8650 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8651 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
8652 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8653 \
8654 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8655 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
8656 IEM_MC_FETCH_EFLAGS(EFlags); \
8657 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
8658 \
8659 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8660 IEM_MC_COMMIT_EFLAGS(EFlags); \
8661 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8662 IEM_MC_END(); \
8663 } (void)0
8664
8665 switch (IEM_GET_MODRM_REG_8(bRm))
8666 {
8667 /**
8668 * @opdone
8669 * @opmaps grp2_c0
8670 * @opcode /0
8671 * @opflclass rotate_count
8672 */
8673 case 0:
8674 {
8675 IEMOP_MNEMONIC2(MI, ROL, rol, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8676 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8677 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
8678 break;
8679 }
8680 /**
8681 * @opdone
8682 * @opmaps grp2_c0
8683 * @opcode /1
8684 * @opflclass rotate_count
8685 */
8686 case 1:
8687 {
8688 IEMOP_MNEMONIC2(MI, ROR, ror, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8689 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8690 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
8691 break;
8692 }
8693 /**
8694 * @opdone
8695 * @opmaps grp2_c0
8696 * @opcode /2
8697 * @opflclass rotate_carry_count
8698 */
8699 case 2:
8700 {
8701 IEMOP_MNEMONIC2(MI, RCL, rcl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8702 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8703 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
8704 break;
8705 }
8706 /**
8707 * @opdone
8708 * @opmaps grp2_c0
8709 * @opcode /3
8710 * @opflclass rotate_carry_count
8711 */
8712 case 3:
8713 {
8714 IEMOP_MNEMONIC2(MI, RCR, rcr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8715 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8716 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
8717 break;
8718 }
8719 /**
8720 * @opdone
8721 * @opmaps grp2_c0
8722 * @opcode /4
8723 * @opflclass shift_count
8724 */
8725 case 4:
8726 {
8727 IEMOP_MNEMONIC2(MI, SHL, shl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8728 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8729 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
8730 break;
8731 }
8732 /**
8733 * @opdone
8734 * @opmaps grp2_c0
8735 * @opcode /5
8736 * @opflclass shift_count
8737 */
8738 case 5:
8739 {
8740 IEMOP_MNEMONIC2(MI, SHR, shr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8741 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8742 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
8743 break;
8744 }
8745 /**
8746 * @opdone
8747 * @opmaps grp2_c0
8748 * @opcode /7
8749 * @opflclass shift_count
8750 */
8751 case 7:
8752 {
8753 IEMOP_MNEMONIC2(MI, SAR, sar, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8754 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8755 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
8756 break;
8757 }
8758
8759 /** @opdone */
8760 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8761 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8762 }
8763#undef GRP2_BODY_Eb_Ib
8764}
8765
8766
/* Need to use a body macro here since the EFLAGS behaviour differs between
   the shifts, rotates and rotate w/ carry. Sigh.
   Expects 'bRm' in scope; fetches the imm8 shift count itself (in the memory
   case only after the effective address has been calculated, so opcode byte
   order is preserved). */
#define GRP2_BODY_Ev_Ib(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 3, 0, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
8897
8898/**
8899 * @opmaps grp2_c1
8900 * @opcode /0
8901 * @opflclass rotate_count
8902 */
8903FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_Ib, uint8_t, bRm)
8904{
8905 IEMOP_MNEMONIC2(MI, ROL, rol, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8906 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
8907}
8908
8909
8910/**
8911 * @opmaps grp2_c1
8912 * @opcode /1
8913 * @opflclass rotate_count
8914 */
8915FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_Ib, uint8_t, bRm)
8916{
8917 IEMOP_MNEMONIC2(MI, ROR, ror, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8918 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
8919}
8920
8921
8922/**
8923 * @opmaps grp2_c1
8924 * @opcode /2
8925 * @opflclass rotate_carry_count
8926 */
8927FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_Ib, uint8_t, bRm)
8928{
8929 IEMOP_MNEMONIC2(MI, RCL, rcl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8930 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
8931}
8932
8933
8934/**
8935 * @opmaps grp2_c1
8936 * @opcode /3
8937 * @opflclass rotate_carry_count
8938 */
8939FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_Ib, uint8_t, bRm)
8940{
8941 IEMOP_MNEMONIC2(MI, RCR, rcr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8942 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
8943}
8944
8945
8946/**
8947 * @opmaps grp2_c1
8948 * @opcode /4
8949 * @opflclass shift_count
8950 */
8951FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_Ib, uint8_t, bRm)
8952{
8953 IEMOP_MNEMONIC2(MI, SHL, shl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8954 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
8955}
8956
8957
8958/**
8959 * @opmaps grp2_c1
8960 * @opcode /5
8961 * @opflclass shift_count
8962 */
8963FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_Ib, uint8_t, bRm)
8964{
8965 IEMOP_MNEMONIC2(MI, SHR, shr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8966 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
8967}
8968
8969
8970/**
8971 * @opmaps grp2_c1
8972 * @opcode /7
8973 * @opflclass shift_count
8974 */
8975FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_Ib, uint8_t, bRm)
8976{
8977 IEMOP_MNEMONIC2(MI, SAR, sar, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8978 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
8979}
8980
8981#undef GRP2_BODY_Ev_Ib
8982
8983/**
8984 * @opcode 0xc1
8985 */
8986FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
8987{
8988 IEMOP_HLP_MIN_186();
8989 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8990
8991 switch (IEM_GET_MODRM_REG_8(bRm))
8992 {
8993 case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_Ib, bRm);
8994 case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_Ib, bRm);
8995 case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_Ib, bRm);
8996 case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_Ib, bRm);
8997 case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_Ib, bRm);
8998 case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_Ib, bRm);
8999 case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_Ib, bRm);
9000 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9001 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
9002 }
9003}
9004
9005
9006/**
9007 * @opcode 0xc2
9008 */
9009FNIEMOP_DEF(iemOp_retn_Iw)
9010{
9011 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
9012 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9013 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
9014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9015 switch (pVCpu->iem.s.enmEffOpSize)
9016 {
9017 case IEMMODE_16BIT:
9018 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
9019 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_16, u16Imm);
9020 case IEMMODE_32BIT:
9021 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
9022 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_32, u16Imm);
9023 case IEMMODE_64BIT:
9024 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
9025 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_64, u16Imm);
9026 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9027 }
9028}
9029
9030
9031/**
9032 * @opcode 0xc3
9033 */
9034FNIEMOP_DEF(iemOp_retn)
9035{
9036 IEMOP_MNEMONIC(retn, "retn");
9037 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
9038 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9039 switch (pVCpu->iem.s.enmEffOpSize)
9040 {
9041 case IEMMODE_16BIT:
9042 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
9043 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_16);
9044 case IEMMODE_32BIT:
9045 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
9046 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_32);
9047 case IEMMODE_64BIT:
9048 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
9049 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_64);
9050 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9051 }
9052}
9053
9054
9055/**
9056 * @opcode 0xc4
9057 */
9058FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
9059{
9060 /* The LDS instruction is invalid 64-bit mode. In legacy and
9061 compatability mode it is invalid with MOD=3.
9062 The use as a VEX prefix is made possible by assigning the inverted
9063 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
9064 outside of 64-bit mode. VEX is not available in real or v86 mode. */
9065 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9066 if ( IEM_IS_64BIT_CODE(pVCpu)
9067 || IEM_IS_MODRM_REG_MODE(bRm) )
9068 {
9069 IEMOP_MNEMONIC(vex3_prefix, "vex3");
9070 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
9071 {
9072 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
9073 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
9074 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
9075 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
9076 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
9077#if 1
9078 AssertCompile(IEM_OP_PRF_SIZE_REX_W == RT_BIT_32(9));
9079 pVCpu->iem.s.fPrefixes |= (uint32_t)(bVex2 & 0x80) << (9 - 7);
9080#else
9081 if (bVex2 & 0x80 /* VEX.W */)
9082 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
9083#endif
9084 if (IEM_IS_64BIT_CODE(pVCpu))
9085 {
9086#if 1
9087 AssertCompile(IEM_OP_PRF_REX_B == RT_BIT_32(25) && IEM_OP_PRF_REX_X == RT_BIT_32(26) && IEM_OP_PRF_REX_R == RT_BIT_32(27));
9088 pVCpu->iem.s.fPrefixes |= (uint32_t)(~bRm & 0xe0) << (25 - 5);
9089#else
9090 if (~bRm & 0x20 /* VEX.~B */)
9091 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_B;
9092 if (~bRm & 0x40 /* VEX.~X */)
9093 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_X;
9094 if (~bRm & 0x80 /* VEX.~R */)
9095 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_R;
9096#endif
9097 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
9098 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
9099 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
9100 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
9101 }
9102 else
9103 {
9104 pVCpu->iem.s.uRexReg = 0;
9105 pVCpu->iem.s.uRexIndex = 0;
9106 pVCpu->iem.s.uRexB = 0;
9107 /** @todo testcase: Will attemps to access registers 8 thru 15 from 16&32 bit
9108 * code raise \#UD or just be ignored? We're ignoring for now... */
9109 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0x7;
9110 }
9111 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
9112 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
9113
9114 switch (bRm & 0x1f)
9115 {
9116 case 1: /* 0x0f lead opcode byte. */
9117#ifdef IEM_WITH_VEX
9118 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
9119#else
9120 IEMOP_BITCH_ABOUT_STUB();
9121 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
9122#endif
9123
9124 case 2: /* 0x0f 0x38 lead opcode bytes. */
9125#ifdef IEM_WITH_VEX
9126 return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
9127#else
9128 IEMOP_BITCH_ABOUT_STUB();
9129 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
9130#endif
9131
9132 case 3: /* 0x0f 0x3a lead opcode bytes. */
9133#ifdef IEM_WITH_VEX
9134 return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
9135#else
9136 IEMOP_BITCH_ABOUT_STUB();
9137 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
9138#endif
9139
9140 default:
9141 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
9142 IEMOP_RAISE_INVALID_OPCODE_RET();
9143 }
9144 }
9145 Log(("VEX3: VEX support disabled!\n"));
9146 IEMOP_RAISE_INVALID_OPCODE_RET();
9147 }
9148
9149 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
9150 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
9151}
9152
9153
9154/**
9155 * @opcode 0xc5
9156 */
9157FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
9158{
9159 /* The LES instruction is invalid 64-bit mode. In legacy and
9160 compatability mode it is invalid with MOD=3.
9161 The use as a VEX prefix is made possible by assigning the inverted
9162 REX.R to the top MOD bit, and the top bit in the inverted register
9163 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
9164 to accessing registers 0..7 in this VEX form. */
9165 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9166 if ( IEM_IS_64BIT_CODE(pVCpu)
9167 || IEM_IS_MODRM_REG_MODE(bRm))
9168 {
9169 IEMOP_MNEMONIC(vex2_prefix, "vex2");
9170 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
9171 {
9172 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
9173 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
9174 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
9175 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
9176 AssertCompile(IEM_OP_PRF_REX_R == RT_BIT_32(27));
9177 pVCpu->iem.s.fPrefixes |= (uint32_t)(~bRm & 0x80) << (27 - 7);
9178 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
9179 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
9180 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
9181 pVCpu->iem.s.idxPrefix = bRm & 0x3;
9182
9183#ifdef IEM_WITH_VEX
9184 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
9185#else
9186 IEMOP_BITCH_ABOUT_STUB();
9187 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
9188#endif
9189 }
9190
9191 /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
9192 Log(("VEX2: VEX support disabled!\n"));
9193 IEMOP_RAISE_INVALID_OPCODE_RET();
9194 }
9195
9196 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
9197 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
9198}
9199
9200
9201/**
9202 * @opcode 0xc6
9203 */
9204FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
9205{
9206 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9207 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
9208 IEMOP_RAISE_INVALID_OPCODE_RET();
9209 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
9210
9211 if (IEM_IS_MODRM_REG_MODE(bRm))
9212 {
9213 /* register access */
9214 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9215 IEM_MC_BEGIN(0, 0, 0, 0);
9216 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9217 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
9218 IEM_MC_ADVANCE_RIP_AND_FINISH();
9219 IEM_MC_END();
9220 }
9221 else
9222 {
9223 /* memory access. */
9224 IEM_MC_BEGIN(0, 1, 0, 0);
9225 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9227 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9229 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
9230 IEM_MC_ADVANCE_RIP_AND_FINISH();
9231 IEM_MC_END();
9232 }
9233}
9234
9235
9236/**
9237 * @opcode 0xc7
9238 */
9239FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
9240{
9241 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9242 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Iz in this group. */
9243 IEMOP_RAISE_INVALID_OPCODE_RET();
9244 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
9245
9246 if (IEM_IS_MODRM_REG_MODE(bRm))
9247 {
9248 /* register access */
9249 switch (pVCpu->iem.s.enmEffOpSize)
9250 {
9251 case IEMMODE_16BIT:
9252 IEM_MC_BEGIN(0, 0, 0, 0);
9253 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9255 IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
9256 IEM_MC_ADVANCE_RIP_AND_FINISH();
9257 IEM_MC_END();
9258 break;
9259
9260 case IEMMODE_32BIT:
9261 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
9262 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9264 IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
9265 IEM_MC_ADVANCE_RIP_AND_FINISH();
9266 IEM_MC_END();
9267 break;
9268
9269 case IEMMODE_64BIT:
9270 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
9271 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9273 IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
9274 IEM_MC_ADVANCE_RIP_AND_FINISH();
9275 IEM_MC_END();
9276 break;
9277
9278 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9279 }
9280 }
9281 else
9282 {
9283 /* memory access. */
9284 switch (pVCpu->iem.s.enmEffOpSize)
9285 {
9286 case IEMMODE_16BIT:
9287 IEM_MC_BEGIN(0, 1, 0, 0);
9288 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9289 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
9290 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9292 IEM_MC_STORE_MEM_U16_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
9293 IEM_MC_ADVANCE_RIP_AND_FINISH();
9294 IEM_MC_END();
9295 break;
9296
9297 case IEMMODE_32BIT:
9298 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
9299 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9300 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9301 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9303 IEM_MC_STORE_MEM_U32_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
9304 IEM_MC_ADVANCE_RIP_AND_FINISH();
9305 IEM_MC_END();
9306 break;
9307
9308 case IEMMODE_64BIT:
9309 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
9310 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9311 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9312 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9314 IEM_MC_STORE_MEM_U64_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
9315 IEM_MC_ADVANCE_RIP_AND_FINISH();
9316 IEM_MC_END();
9317 break;
9318
9319 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9320 }
9321 }
9322}
9323
9324
9325
9326
9327/**
9328 * @opcode 0xc8
9329 */
9330FNIEMOP_DEF(iemOp_enter_Iw_Ib)
9331{
9332 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
9333 IEMOP_HLP_MIN_186();
9334 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9335 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
9336 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
9337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9338 IEM_MC_DEFER_TO_CIMPL_3_RET(0,
9339 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9340 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
9341 iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
9342}
9343
9344
9345/**
9346 * @opcode 0xc9
9347 */
9348FNIEMOP_DEF(iemOp_leave)
9349{
9350 IEMOP_MNEMONIC(leave, "leave");
9351 IEMOP_HLP_MIN_186();
9352 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9354 IEM_MC_DEFER_TO_CIMPL_1_RET(0,
9355 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9356 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
9357 iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
9358}
9359
9360
9361/**
9362 * @opcode 0xca
9363 */
9364FNIEMOP_DEF(iemOp_retf_Iw)
9365{
9366 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
9367 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9368 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9369 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
9370 | IEM_CIMPL_F_MODE,
9371 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9372 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
9373 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
9374 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
9375 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9376 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
9377 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
9378 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
9379 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9380 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
9381 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
9382 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
9383 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9384 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
9385 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
9386 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
9387 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9388 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
9389}
9390
9391
9392/**
9393 * @opcode 0xcb
9394 */
9395FNIEMOP_DEF(iemOp_retf)
9396{
9397 IEMOP_MNEMONIC(retf, "retf");
9398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9399 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
9400 | IEM_CIMPL_F_MODE,
9401 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9402 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
9403 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
9404 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
9405 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9406 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
9407 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
9408 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
9409 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9410 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
9411 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
9412 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
9413 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9414 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
9415 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
9416 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
9417 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9418 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
9419}
9420
9421
/**
 * @opcode 0xcc
 */
FNIEMOP_DEF(iemOp_int3)
{
    IEMOP_MNEMONIC(int3, "int3");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Breakpoint trap - raises \#BP via iemCImpl_int.  Note the
       IEM_CIMPL_F_END_TB flag: a TB is always ended after int3. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
}
9433
9434
/**
 * @opcode 0xcd
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    IEMOP_MNEMONIC(int_Ib, "int Ib");
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Software interrupt with an arbitrary vector; may switch task/ring, so
       conservatively flush everything (UINT64_MAX). */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS, UINT64_MAX,
                                iemCImpl_int, u8Int, IEMINT_INTN);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
9448
9449
/**
 * @opcode 0xce
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT(); /* into is invalid in 64-bit mode. */
    /* Conditional \#OF trap - only taken when EFLAGS.OF is set, hence the
       IEM_CIMPL_F_BRANCH_CONDITIONAL flag.  Conservative full flush. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_BRANCH_CONDITIONAL | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                UINT64_MAX,
                                iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
9463
9464
/**
 * @opcode 0xcf
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Interrupt return - deferred to iemCImpl_iret.  IRQs are checked before
       executing (IEM_CIMPL_F_CHECK_IRQ_BEFORE) since iret often re-enables
       interrupts.  Flush xSP plus all data segment register state. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
                                iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
    /* Segment registers are sanitized when returning to an outer ring, or fully
       reloaded when returning to v86 mode. Thus the large flush list above. */
}
9495
9496
/**
 * @opcode 0xd0
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Need to use a body macro here since the EFLAGS behaviour differs between
       the shifts, rotates and rotate w/ carry. Sigh.
       Emits the microcode for a one-bit group 2 shift/rotate on an 8-bit
       register or memory operand; a_pImplExpr selects the worker table. */
#define GRP2_BODY_Eb_1(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory */ \
        IEM_MC_BEGIN(3, 3, 0, 0); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1); \
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2); \
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
        IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_FETCH_EFLAGS(EFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        \
        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
        IEM_MC_COMMIT_EFLAGS(EFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } (void)0

    /* The ModR/M reg field selects the operation. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        /**
         * @opdone
         * @opmaps      grp2_d0
         * @opcode      /0
         * @opflclass   rotate_1
         */
        case 0:
        {
            IEMOP_MNEMONIC2(M1, ROL, rol, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps      grp2_d0
         * @opcode      /1
         * @opflclass   rotate_1
         */
        case 1:
        {
            IEMOP_MNEMONIC2(M1, ROR, ror, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps      grp2_d0
         * @opcode      /2
         * @opflclass   rotate_carry_1
         */
        case 2:
        {
            IEMOP_MNEMONIC2(M1, RCL, rcl, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps      grp2_d0
         * @opcode      /3
         * @opflclass   rotate_carry_1
         */
        case 3:
        {
            IEMOP_MNEMONIC2(M1, RCR, rcr, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps      grp2_d0
         * @opcode      /4
         * @opflclass   shift_1
         */
        case 4:
        {
            IEMOP_MNEMONIC2(M1, SHL, shl, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps      grp2_d0
         * @opcode      /5
         * @opflclass   shift_1
         */
        case 5:
        {
            IEMOP_MNEMONIC2(M1, SHR, shr, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps      grp2_d0
         * @opcode      /7
         * @opflclass   shift_1
         */
        case 7:
        {
            IEMOP_MNEMONIC2(M1, SAR, sar, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
            break;
        }
        /** @opdone */
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is unassigned in group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
#undef GRP2_BODY_Eb_1
}
9636
9637
/* Need to use a body macro here since the EFLAGS behaviour differs between
   the shifts, rotates and rotate w/ carry. Sigh.
   GRP2_BODY_Ev_1 emits the decode+execute microcode for a one-bit group 2
   shift/rotate on Ev (16/32/64-bit register or memory operand, chosen by
   the effective operand size).  a_pImplExpr selects the worker function
   table (rol/ror/rcl/rcr/shl/shr/sar).  Expanded by the iemOp_grp2_*_Ev_1
   functions below; #undef'd once they are done. */
#define GRP2_BODY_Ev_1(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,           2); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,           2); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,           2); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 3, 0, 0); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2); \
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2); \
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2); \
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
9758
/**
 * @opmaps      grp2_d1
 * @opcode      /0
 * @opflclass   rotate_1
 */
FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_1, uint8_t, bRm)
{
    /* rol Ev,1 - rotate left by one. */
    IEMOP_MNEMONIC2(M1, ROL, rol, Ev, 1, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
}
9769
9770
/**
 * @opmaps      grp2_d1
 * @opcode      /1
 * @opflclass   rotate_1
 */
FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_1, uint8_t, bRm)
{
    /* ror Ev,1 - rotate right by one. */
    IEMOP_MNEMONIC2(M1, ROR, ror, Ev, 1, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
}
9781
9782
/**
 * @opmaps      grp2_d1
 * @opcode      /2
 * @opflclass   rotate_carry_1
 */
FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_1, uint8_t, bRm)
{
    /* rcl Ev,1 - rotate left through carry by one. */
    IEMOP_MNEMONIC2(M1, RCL, rcl, Ev, 1, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
}
9793
9794
/**
 * @opmaps      grp2_d1
 * @opcode      /3
 * @opflclass   rotate_carry_1
 */
FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_1, uint8_t, bRm)
{
    /* rcr Ev,1 - rotate right through carry by one. */
    IEMOP_MNEMONIC2(M1, RCR, rcr, Ev, 1, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
}
9805
9806
/**
 * @opmaps      grp2_d1
 * @opcode      /4
 * @opflclass   shift_1
 */
FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_1, uint8_t, bRm)
{
    /* shl Ev,1 - shift left by one. */
    IEMOP_MNEMONIC2(M1, SHL, shl, Ev, 1, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
}
9817
9818
/**
 * @opmaps      grp2_d1
 * @opcode      /5
 * @opflclass   shift_1
 */
FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_1, uint8_t, bRm)
{
    /* shr Ev,1 - logical shift right by one. */
    IEMOP_MNEMONIC2(M1, SHR, shr, Ev, 1, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
}
9829
9830
/**
 * @opmaps      grp2_d1
 * @opcode      /7
 * @opflclass   shift_1
 */
FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_1, uint8_t, bRm)
{
    /* sar Ev,1 - arithmetic shift right by one. */
    IEMOP_MNEMONIC2(M1, SAR, sar, Ev, 1, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
}
9841
9842#undef GRP2_BODY_Ev_1
9843
9844/**
9845 * @opcode 0xd1
9846 */
9847FNIEMOP_DEF(iemOp_Grp2_Ev_1)
9848{
9849 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9850 switch (IEM_GET_MODRM_REG_8(bRm))
9851 {
9852 case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_1, bRm);
9853 case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_1, bRm);
9854 case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_1, bRm);
9855 case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_1, bRm);
9856 case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_1, bRm);
9857 case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_1, bRm);
9858 case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_1, bRm);
9859 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9860 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
9861 }
9862}
9863
9864
/**
 * @opcode 0xd2
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Need to use a body macro here since the EFLAGS behaviour differs between
       the shifts, rotates and rotate w/ carry. Sigh.
       Emits the microcode for a CL-count group 2 shift/rotate on an 8-bit
       register or memory operand; a_pImplExpr selects the worker table. */
#define GRP2_BODY_Eb_CL(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0); \
        IEM_MC_ARG(uint8_t,     cShiftArg,  1); \
        IEM_MC_ARG(uint32_t *,  pEFlags,    2); \
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory */ \
        IEM_MC_BEGIN(3, 3, 0, 0); \
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0); \
        IEM_MC_ARG(uint8_t,     cShiftArg,       1); \
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
        IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_FETCH_EFLAGS(EFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        \
        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
        IEM_MC_COMMIT_EFLAGS(EFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } (void)0

    /* The ModR/M reg field selects the operation. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        /**
         * @opdone
         * @opmaps      grp2_d2
         * @opcode      /0
         * @opflclass   rotate_count
         */
        case 0:
        {
            IEMOP_MNEMONIC2EX(rol_Eb_CL, "rol Eb,CL", M_CL, ROL, rol, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps      grp2_d2
         * @opcode      /1
         * @opflclass   rotate_count
         */
        case 1:
        {
            IEMOP_MNEMONIC2EX(ror_Eb_CL, "ror Eb,CL", M_CL, ROR, ror, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps      grp2_d2
         * @opcode      /2
         * @opflclass   rotate_carry_count
         */
        case 2:
        {
            IEMOP_MNEMONIC2EX(rcl_Eb_CL, "rcl Eb,CL", M_CL, RCL, rcl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps      grp2_d2
         * @opcode      /3
         * @opflclass   rotate_carry_count
         */
        case 3:
        {
            IEMOP_MNEMONIC2EX(rcr_Eb_CL, "rcr Eb,CL", M_CL, RCR, rcr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps      grp2_d2
         * @opcode      /4
         * @opflclass   shift_count
         */
        case 4:
        {
            IEMOP_MNEMONIC2EX(shl_Eb_CL, "shl Eb,CL", M_CL, SHL, shl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps      grp2_d2
         * @opcode      /5
         * @opflclass   shift_count
         */
        case 5:
        {
            IEMOP_MNEMONIC2EX(shr_Eb_CL, "shr Eb,CL", M_CL, SHR, shr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps      grp2_d2
         * @opcode      /7
         * @opflclass   shift_count
         */
        case 7:
        {
            IEMOP_MNEMONIC2EX(sar_Eb_CL, "sar Eb,CL", M_CL, SAR, sar, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
            break;
        }
        /** @opdone */
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is unassigned in group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
#undef GRP2_BODY_Eb_CL
}
10006
10007
/* Need to use a body macro here since the EFLAGS behaviour differs between
   the shifts, rotates and rotate w/ carry. Sigh.
   GRP2_BODY_Ev_CL emits the decode+execute microcode for a CL-count group 2
   shift/rotate on Ev (16/32/64-bit register or memory operand, chosen by
   the effective operand size).  a_pImplExpr selects the worker function
   table.  Expanded by the iemOp_grp2_*_Ev_CL functions below; #undef'd once
   they are done. */
#define GRP2_BODY_Ev_CL(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0); \
                IEM_MC_ARG(uint8_t,     cShiftArg,  1); \
                IEM_MC_ARG(uint32_t *,  pEFlags,    2); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0); \
                IEM_MC_ARG(uint8_t,     cShiftArg,  1); \
                IEM_MC_ARG(uint32_t *,  pEFlags,    2); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0); \
                IEM_MC_ARG(uint8_t,     cShiftArg,  1); \
                IEM_MC_ARG(uint32_t *,  pEFlags,    2); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 3, 0, 0); \
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0); \
                IEM_MC_ARG(uint8_t,     cShiftArg,       1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0); \
                IEM_MC_ARG(uint8_t,     cShiftArg,       1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0); \
                IEM_MC_ARG(uint8_t,     cShiftArg,       1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
10134
10135
/**
 * @opmaps      grp2_d3
 * @opcode      /0
 * @opflclass   rotate_count
 */
FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_CL, uint8_t, bRm)
{
    /* rol Ev,CL - rotate left by CL.  (opmaps fixed: opcode is 0xd3, not 0xd0.) */
    IEMOP_MNEMONIC2EX(rol_Ev_CL, "rol Ev,CL", M_CL, ROL, rol, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
}
10146
10147
/**
 * @opmaps      grp2_d3
 * @opcode      /1
 * @opflclass   rotate_count
 */
FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_CL, uint8_t, bRm)
{
    /* ror Ev,CL - rotate right by CL. */
    IEMOP_MNEMONIC2EX(ror_Ev_CL, "ror Ev,CL", M_CL, ROR, ror, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
}
10158
10159
/**
 * @opmaps      grp2_d3
 * @opcode      /2
 * @opflclass   rotate_carry_count
 */
FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_CL, uint8_t, bRm)
{
    /* rcl Ev,CL - rotate left through carry by CL. */
    IEMOP_MNEMONIC2EX(rcl_Ev_CL, "rcl Ev,CL", M_CL, RCL, rcl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
}
10170
10171
/**
 * @opmaps      grp2_d3
 * @opcode      /3
 * @opflclass   rotate_carry_count
 */
FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_CL, uint8_t, bRm)
{
    /* rcr Ev,CL - rotate right through carry by CL. */
    IEMOP_MNEMONIC2EX(rcr_Ev_CL, "rcr Ev,CL", M_CL, RCR, rcr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
}
10182
10183
/**
 * @opmaps      grp2_d3
 * @opcode      /4
 * @opflclass   shift_count
 */
FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_CL, uint8_t, bRm)
{
    /* shl Ev,CL - shift left by CL. */
    IEMOP_MNEMONIC2EX(shl_Ev_CL, "shl Ev,CL", M_CL, SHL, shl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
}
10194
10195
/**
 * @opmaps      grp2_d3
 * @opcode      /5
 * @opflclass   shift_count
 */
FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_CL, uint8_t, bRm)
{
    /* shr Ev,CL - logical shift right by CL. */
    IEMOP_MNEMONIC2EX(shr_Ev_CL, "shr Ev,CL", M_CL, SHR, shr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
}
10206
10207
/**
 * @opmaps      grp2_d3
 * @opcode      /7
 * @opflclass   shift_count
 */
FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_CL, uint8_t, bRm)
{
    /* sar Ev,CL - arithmetic shift right by CL. */
    IEMOP_MNEMONIC2EX(sar_Ev_CL, "sar Ev,CL", M_CL, SAR, sar, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
}
10218
10219#undef GRP2_BODY_Ev_CL
10220
10221/**
10222 * @opcode 0xd3
10223 */
10224FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
10225{
10226 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10227 switch (IEM_GET_MODRM_REG_8(bRm))
10228 {
10229 case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_CL, bRm);
10230 case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_CL, bRm);
10231 case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_CL, bRm);
10232 case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_CL, bRm);
10233 case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_CL, bRm);
10234 case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_CL, bRm);
10235 case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_CL, bRm);
10236 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10237 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
10238 }
10239}
10240
10241
/**
 * @opcode 0xd4
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef cf,af,of
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
/** @todo testcase: aam */
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* aam is invalid in 64-bit mode. */
    /* A zero divisor raises #DE; since bImm is an immediate this can be
       decided at decode time rather than in the C implementation. */
    if (!bImm)
        IEMOP_RAISE_DIVIDE_ERROR_RET();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aam, bImm);
}
10258
10259
/**
 * @opcode 0xd5
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef cf,af,of
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
/** @todo testcase: aad? */
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* aad is invalid in 64-bit mode. */
    /* Only xAX is modified (flushed); flags handled via IEM_CIMPL_F_STATUS_FLAGS. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aad, bImm);
}
10274
10275
/**
 * @opcode 0xd6
 */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_NO_64BIT(); /* salc is invalid in 64-bit mode. */

    /* AL = CF ? 0xff : 0x00, no flags modified. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10294
10295
/**
 * @opcode 0xd7
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    /* AL = mem8[seg:rBX + AL], with rBX truncated to the effective address
       size.  One IEM_MC variant per address mode. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10346
10347
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * Raises \#NM / FPU exceptions as appropriate; on stack underflow (either
 * register empty) the underflow path is taken instead of calling the worker.
 *
 * @param   bRm         Mod R/M byte (the R/M field selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Only call the worker when both ST0 and STn are non-empty. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode); /* result goes to ST0 */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10377
10378
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags (FSW); no register is written.
 *
 * @param   bRm         Mod R/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Only FSW is updated on success; UINT8_MAX = no destination register on underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10408
10409
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags (FSW), and popping the stack when done.
 *
 * @param   bRm         Mod R/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Same as the non-pop variant, but the _THEN_POP forms pop ST(0) afterwards. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10439
10440
/** Opcode 0xd8 11/0: FADD ST(0),ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    /* Common ST0-op-STn worker stores the sum in ST(0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
10447
10448
/** Opcode 0xd8 11/1: FMUL ST(0),ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    /* Common ST0-op-STn worker stores the product in ST(0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
10455
10456
/** Opcode 0xd8 11/2: FCOM ST(0),ST(i) - compare, flags only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    /* Flags-only worker; no register is modified. */
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
10463
10464
/** Opcode 0xd8 11/3: FCOMP ST(0),ST(i) - compare, flags only, then pop. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    /* Same comparison implementation as FCOM, the worker adds the pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
10471
10472
/** Opcode 0xd8 11/4: FSUB ST(0),ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    /* ST(0) = ST(0) - ST(i). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
10479
10480
/** Opcode 0xd8 11/5: FSUBR ST(0),ST(i) - reversed operand order. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    /* ST(0) = ST(i) - ST(0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
10487
10488
/** Opcode 0xd8 11/6: FDIV ST(0),ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    /* ST(0) = ST(0) / ST(i). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
10495
10496
/** Opcode 0xd8 11/7: FDIVR ST(0),ST(i) - reversed operand order. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    /* ST(0) = ST(i) / ST(0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
10503
10504
/**
 * Common worker for FPU instructions working on ST0 and an m32r (32-bit real
 * memory operand), and storing the result in ST0.
 *
 * The memory operand is fetched before the ST(0) emptiness check; the
 * assembly worker converts/uses the r32 value directly.
 *
 * @param   bRm         Mod R/M byte; encodes the memory operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    /* Effective address must be calculated before the decoding is completed. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10540
10541
/** Opcode 0xd8 !11/0: FADD ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    /* Common ST0-op-m32r worker stores the sum in ST(0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
10548
10549
/** Opcode 0xd8 !11/1: FMUL ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    /* Common ST0-op-m32r worker stores the product in ST(0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
10556
10557
/** Opcode 0xd8 !11/2: FCOM ST(0),m32real - compare, flags only. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The _MEM_OP variants also record FPUDP/FPUDS (the data pointer) in the FPU state. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10589
10590
/** Opcode 0xd8 !11/3: FCOMP ST(0),m32real - compare, flags only, then pop. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Identical to iemOp_fcom_m32r except for the _THEN_POP FSW/underflow forms. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10622
10623
/** Opcode 0xd8 !11/4: FSUB ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    /* ST(0) = ST(0) - m32real. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
10630
10631
/** Opcode 0xd8 !11/5: FSUBR ST(0),m32real - reversed operand order. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    /* ST(0) = m32real - ST(0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
10638
10639
/** Opcode 0xd8 !11/6: FDIV ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    /* ST(0) = ST(0) / m32real. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
10646
10647
/** Opcode 0xd8 !11/7: FDIVR ST(0),m32real - reversed operand order. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    /* ST(0) = m32real / ST(0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
10654
10655
/**
 * @opcode 0xd8
 *
 * First x87 escape opcode.  Dispatches on the reg field of the ModR/M byte,
 * with separate tables for the register form (mod == 3, ST(i) operands) and
 * the memory form (m32real operands).
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The 11-bit x87 opcode (low 3 bits of the escape byte + ModR/M) is kept
       for FOP reporting by the workers. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10695
10696
/** Opcode 0xd9 /0 mem32real - FLD m32real, pushes the value onto the stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 relative to TOP is the one that becomes the new TOP when
       pushing; it must be empty or we have a stack (push) overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10727
10728
/** Opcode 0xd9 !11/2 mem32real - FST m32real, stores ST(0) to memory. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* The destination is mapped for writing up front so memory faults are
       raised before any FPU state is changed. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked we store the indefinite QNaN,
           otherwise nothing is written. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10766
10767
/** Opcode 0xd9 !11/3 - FSTP m32real, stores ST(0) to memory and pops. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Identical to iemOp_fst_m32r except for the _THEN_POP FSW/underflow forms. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: QNaN store only when the invalid-op exception is masked. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10805
10806
/** Opcode 0xd9 !11/4 - FLDENV m14/28byte, loads the FPU environment.
 * Deferred to a C implementation (iemCImpl_fldenv); the 14 vs 28 byte
 * layout is selected by the effective operand size. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw),
                        iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
10825
10826
10827/** Opcode 0xd9 !11/5 */
10828FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
10829{
10830 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
10831 IEM_MC_BEGIN(1, 1, 0, 0);
10832 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10833 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10834
10835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10836 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10837 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10838
10839 IEM_MC_ARG(uint16_t, u16Fsw, 0);
10840 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10841
10842 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw),
10843 iemCImpl_fldcw, u16Fsw);
10844 IEM_MC_END();
10845}
10846
10847
/** Opcode 0xd9 !11/6 - FNSTENV m14/m28byte, stores the FPU environment.
 * Deferred to a C implementation (iemCImpl_fnstenv).  Note: the mnemonic id
 * below says "fstenv"; FSTENV is the WAIT-prefixed form of this opcode. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
10866
10867
/** Opcode 0xd9 !11/7 - FNSTCW m2byte, stores the FPU control word. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    /* Simple read of FCW followed by a 16-bit memory store; no FSW change. */
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
10884
10885
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?. - FNOP, updates FOP/FPUIP only. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
10901
10902
/** Opcode 0xd9 11/0 stN - FLD ST(i), pushes a copy of ST(i). */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Source register must be non-empty; the value is wrapped in a result
       with a zero FSW and pushed onto the stack. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10927
10928
/** Opcode 0xd9 11/1 stN - FXCH ST(i), exchanges ST(0) and ST(i).
 * (FXCH is D9 C8+i, i.e. mod=11 with reg field 1.) */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Happy path: both registers valid - swap in place.  The underflow case
       is handled by a C helper (iemCImpl_fxch_underflow). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10957
10958
/** Opcode 0xd9 11/4, 0xdd 11/2. - FSTP ST(i), copies ST(0) to ST(i) and pops. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        /* Destination is ST(0) itself: no copy needed, just check for
           emptiness, clear FSW flags and pop. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* General case: copy ST(0) into ST(i), then pop. */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11005
11006
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * Raises \#NM / \#MF as needed and signals stack underflow when ST(0) is
 * empty.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11034
11035
/** Opcode 0xd9 0xe0 - FCHS, negates ST(0). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    /* Unary worker replaces ST(0) with the result. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
11042
11043
/** Opcode 0xd9 0xe1 - FABS, takes the absolute value of ST(0). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    /* Unary worker replaces ST(0) with the result. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
11050
11051
/** Opcode 0xd9 0xe4 - FTST, compares ST(0) with 0.0; flags only. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Only FSW is updated; UINT8_MAX = no destination register on underflow. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11075
11076
/** Opcode 0xd9 0xe5 - FXAM, classifies ST(0) into C0-C3; flags only. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Unlike most x87 ops FXAM also classifies an empty ST(0), so the
       register is referenced unconditionally - no emptiness check. */
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11097
11098
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * Signals a stack (push) overflow if the register that would become the new
 * TOP is occupied.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEM_MC_BEGIN(1, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 relative to TOP becomes the new TOP on push; must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11124
11125
/** Opcode 0xd9 0xe8 - FLD1, pushes +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
11132
11133
/** Opcode 0xd9 0xe9 - FLDL2T, pushes log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
11140
11141
/** Opcode 0xd9 0xea - FLDL2E, pushes log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
11148
/** Opcode 0xd9 0xeb - FLDPI, pushes pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
11155
11156
/** Opcode 0xd9 0xec - FLDLG2, pushes log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
11163
/** Opcode 0xd9 0xed - FLDLN2, pushes ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
11170
11171
/** Opcode 0xd9 0xee - FLDZ, pushes +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
11178
11179
/** Opcode 0xd9 0xf0 - F2XM1, computes 2^ST(0) - 1.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs).  In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    /* Unary worker replaces ST(0) with the result. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
11193
11194
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * Note the reversed operand roles compared to iemOpHlpFpu_st0_stN: here STn
 * is the first operand and the destination.
 *
 * @param   bRm         Mod R/M byte; the R/M field selects STn.  Callers
 *                      emulating fixed ST1 forms pass a literal 1 here.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11225
11226
/** Opcode 0xd9 0xf1 - FYL2X, ST(1) = ST(1) * log2(ST(0)), then pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    /* bRm = 1 selects ST(1) as the first operand/destination in the worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
11233
11234
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* The two-value result macro stores into ST(0) and pushes the second value. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11262
11263
/** Opcode 0xd9 0xf2 - FPTAN, replaces ST(0) with tan and pushes 1.0. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
11270
11271
/** Opcode 0xd9 0xf3 - FPATAN, ST(1) = atan(ST(1)/ST(0)), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    /* bRm = 1 selects ST(1) as the first operand/destination in the worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
11278
11279
/** Opcode 0xd9 0xf4 - fxtract: split st0 into exponent (st0) and significand (pushed). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    /* Two-output worker: one value replaces ST0, a second is pushed. */
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
11286
11287
/** Opcode 0xd9 0xf5 - fprem1: st0 := IEEE remainder of st0 / st1 (no pop). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    /* Generic ST0 := f(ST0, ST(1)) worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
11294
11295
/** Opcode 0xd9 0xf6 - fdecstp: decrement the FPU stack TOP pointer (no tag changes). */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    /* Only FSW (TOP and the cleared condition codes) changes; u16FswAdd is zero. */
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11316
11317
/** Opcode 0xd9 0xf7 - fincstp: increment the FPU stack TOP pointer (no tag changes). */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    /* Only FSW (TOP and the cleared condition codes) changes; u16FswAdd is zero. */
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11338
11339
/** Opcode 0xd9 0xf8 - fprem: st0 := partial (truncating) remainder of st0 / st1. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    /* Generic ST0 := f(ST0, ST(1)) worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
11346
11347
/** Opcode 0xd9 0xf9 - fyl2xp1: st1 := st1 * log2(st0 + 1.0), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    /* Generic "store to ST(1) and pop" worker with the fyl2xp1 assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
11354
11355
/** Opcode 0xd9 0xfa - fsqrt: st0 := sqrt(st0). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    /* Generic unary ST0 worker. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
11362
11363
/** Opcode 0xd9 0xfb - fsincos: st0 := sin(st0), then push cos(old st0). */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    /* Two-output worker: one value replaces ST0, a second is pushed. */
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
11370
11371
/** Opcode 0xd9 0xfc - frndint: round st0 to integer per the FCW rounding mode. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    /* Generic unary ST0 worker. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
11378
11379
/** Opcode 0xd9 0xfd - fscale: st0 := st0 * 2^trunc(st1). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    /* Generic ST0 := f(ST0, ST(1)) worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
11386
11387
/** Opcode 0xd9 0xfe - fsin: st0 := sin(st0). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    /* Generic unary ST0 worker. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
11394
11395
/** Opcode 0xd9 0xff - fcos: st0 := cos(st0). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    /* Generic unary ST0 worker. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
11402
11403
/** Used by iemOp_EscF1.
 * Dispatch table for register-form 0xd9 instructions with ModR/M bytes in the
 * 0xe0..0xff range, indexed by (bRm - 0xe0).  Undefined encodings point at
 * iemOp_Invalid. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
11440
11441
/**
 * @opcode  0xd9
 *
 * Decoder for the 0xd9 FPU escape byte.  Register forms dispatch on
 * reg/the full ModR/M byte; memory forms dispatch on reg only.
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode: ModR/M byte plus the low 3 bits of the escape byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                /* Only d9 d0 (fnop) is defined in this group; d1..d7 are invalid. */
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* reg 4..7 <=> bRm 0xe0..0xff: individual instructions via lookup table. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11486
11487
/** Opcode 0xda 11/0 - fcmovb: st0 := stN if CF is set. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST0 must be occupied, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP/FPUCS are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11512
11513
/** Opcode 0xda 11/1 - fcmove: st0 := stN if ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST0 must be occupied, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP/FPUCS are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11538
11539
/** Opcode 0xda 11/2 - fcmovbe: st0 := stN if CF or ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST0 must be occupied, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP/FPUCS are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11564
11565
/** Opcode 0xda 11/3 - fcmovu: st0 := stN if PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST0 must be occupied, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP/FPUCS are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11590
11591
/**
 * Common worker for FPU instructions working on ST0 and ST1, only affecting
 * flags, and popping twice when done (FUCOMPP and the like).
 *
 * The implementation only produces an FSW value; no register is written.
 * If either ST0 or ST1 is empty, the underflow-then-double-pop path is taken.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        /* Merge the returned FSW, then pop both operands off the stack. */
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11621
11622
/** Opcode 0xda 0xe9 - fucompp: unordered compare st0 with st1, pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    /* Flags-only worker; shares the fucom assembly helper with FUCOM/FUCOMP. */
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
11629
11630
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0 (FIADD, FIMUL, FISUB, FISUBR, FIDIV, FIDIVR with m32i).
 *
 * The memory operand is fetched before the ST0-empty check, so memory faults
 * take precedence over FPU stack underflow.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        /* Result replaces ST0 (register index 0 relative to TOP). */
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11666
11667
/** Opcode 0xda !11/0 - fiadd m32i: st0 := st0 + (int32 memory operand). */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
11674
11675
/** Opcode 0xda !11/1 - fimul m32i: st0 := st0 * (int32 memory operand). */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
11682
11683
/** Opcode 0xda !11/2 - ficom: compare st0 with an int32 memory operand (flags only). */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the memory operand first so memory faults precede stack underflow. */
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* Flags-only result: merge FSW and record the memory operand (FDP/FDS). */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11715
11716
/** Opcode 0xda !11/3 - ficomp: like ficom m32i, but pops st0 afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the memory operand first so memory faults precede stack underflow. */
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        /* Same assembly helper as FICOM; the pop is done in the FSW update below. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11748
11749
/** Opcode 0xda !11/4 - fisub m32i: st0 := st0 - (int32 memory operand). */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
11756
11757
/** Opcode 0xda !11/5 - fisubr m32i: st0 := (int32 memory operand) - st0. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
11764
11765
/** Opcode 0xda !11/6 - fidiv m32i: st0 := st0 / (int32 memory operand). */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
11772
11773
/** Opcode 0xda !11/7 - fidivr m32i: st0 := (int32 memory operand) / st0. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
11780
11781
/**
 * @opcode  0xda
 *
 * Decoder for the 0xda FPU escape byte: FCMOVcc in register form,
 * 32-bit integer arithmetic in memory form, plus FUCOMPP (da e9).
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode: ModR/M byte plus the low 3 bits of the escape byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5:
                /* Only da e9 (fucompp) is defined in this group. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11823
11824
/** Opcode 0xdb !11/0 - fild m32i: push an int32 memory operand onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires ST(7) (the register below TOP) to be free, else stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11855
11856
/** Opcode 0xdb !11/1 - fisttp m32i: store st0 as int32 with truncation, then pop (SSE3). */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination write-only up front so memory faults precede FPU work. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit honours unmasked exceptions signalled in u16Fsw. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: with IM masked store the integer indefinite, else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11894
11895
/** Opcode 0xdb !11/2 - fist m32i: store st0 as int32 using the FCW rounding mode (no pop). */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination write-only up front so memory faults precede FPU work. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit honours unmasked exceptions signalled in u16Fsw. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: with IM masked store the integer indefinite, else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11933
11934
/** Opcode 0xdb !11/3 - fistp m32i: store st0 as int32 (FCW rounding), then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination write-only up front so memory faults precede FPU work. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        /* Same assembly helper as FIST; the pop happens in the FSW update below. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: with IM masked store the integer indefinite, else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11972
11973
/** Opcode 0xdb !11/5 - fld m80r: push an 80-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires ST(7) (the register below TOP) to be free, else stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12004
12005
/** Opcode 0xdb !11/7 - fstp m80r: store st0 as an 80-bit real, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination write-only up front so memory faults precede FPU work. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_MEM_MAP_R80_WO(pr80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: with IM masked store the real indefinite (negative QNaN), else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12043
12044
/** Opcode 0xdb 11/0 - fcmovnb: st0 := stN if CF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST0 must be occupied, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP/FPUCS are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12069
12070
/** Opcode 0xdb 11/1 - fcmovne: st0 := stN if ZF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST0 must be occupied, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP/FPUCS are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12095
12096
/** Opcode 0xdb 11/2 - fcmovnbe: st0 := stN if both CF and ZF are clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST0 must be occupied, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP/FPUCS are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12121
12122
/** Opcode 0xdb 11/3 - fcmovnu: st0 := stN if PF is clear (not unordered). */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST0 must be occupied, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP/FPUCS are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12147
12148
/** Opcode 0xdb 0xe0 - fneni: 8087 enable-interrupts relic; emulated as a no-op
 *  (still subject to \#NM via CR0.EM/TS). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12159
12160
/** Opcode 0xdb 0xe1 - fndisi: 8087 disable-interrupts relic; emulated as a
 *  no-op (still subject to \#NM via CR0.EM/TS). */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12171
12172
/** Opcode 0xdb 0xe2 - fnclex: clear FPU exception flags without checking for
 *  pending unmasked exceptions first (the "no-wait" form). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Clears the exception bits in FSW; no IEM_MC_MAYBE_RAISE_FPU_XCPT by design. */
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12185
12186
/** Opcode 0xdb 0xe3 - fninit: reinitialize the FPU without checking for
 *  pending exceptions (fCheckXcpts=false); deferred to the C implementation. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                                iemCImpl_finit, false /*fCheckXcpts*/);
}
12195
12196
/** Opcode 0xdb 0xe4 - fnsetpm: 80287 set-protected-mode relic; emulated as a
 *  no-op (still subject to \#NM via CR0.EM/TS). */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12207
12208
/** Opcode 0xdb 0xe5 - frstpm: 80287XL reset-protected-mode relic; raises \#UD
 *  here since newer CPUs do so (the no-op variant is compiled out). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
12224
12225
/** Opcode 0xdb 11/5. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    /* FUCOMI st0,st(i): unordered compare setting EFLAGS.  Deferred to the
       shared C worker; the last parameter packs the pop flag (zero, FUCOMI
       does not pop) together with the saved FPU opcode word. */
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
12234
12235
12236/** Opcode 0xdb 11/6. */
12237FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
12238{
12239 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
12240 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
12241 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
12242 false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
12243}
12244
12245
/**
 * @opcode 0xdb
 *
 * Decoder for the 0xdb FPU escape byte.  Register forms cover FCMOVcc,
 * the FNxxx administration instructions and FUCOMI/FCOMI; memory forms
 * cover 32-bit integer load/store and 80-bit real load/store.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Remember the FPU opcode word for the FOP updates done by the handlers. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* /4 register forms encode individual administration ops. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
                    case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12297
12298
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * If either register is empty, the stack-underflow path updates FSW instead
 * of storing a result.
 *
 * @param   bRm         Mod R/M byte (the R/M field selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Operand 1 is ST(i) (destination), operand 2 is ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12329
12330
/** Opcode 0xdc 11/0.
 * FADD st(i),st0 - thin wrapper over the common STn/ST0 worker. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xdc 11/1.
 * FMUL st(i),st0 - thin wrapper over the common STn/ST0 worker. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xdc 11/4.
 * FSUBR st(i),st0 - thin wrapper over the common STn/ST0 worker. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xdc 11/5.
 * FSUB st(i),st0 - thin wrapper over the common STn/ST0 worker. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xdc 11/6.
 * FDIVR st(i),st0 - thin wrapper over the common STn/ST0 worker. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xdc 11/7.
 * FDIV st(i),st0 - thin wrapper over the common STn/ST0 worker. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
12377
12378
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * If ST0 is empty, the stack-underflow path updates FSW (with the memory
 * operand recorded for FDP) instead of storing a result.
 *
 * @param   bRm         Mod R/M byte (memory form, selects the m64 operand).
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Fetch the memory operand before touching the FPU state. */
    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12413
12414
/** Opcode 0xdc !11/0.
 * FADD st0,m64r - thin wrapper over the common ST0/m64 worker. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}


/** Opcode 0xdc !11/1.
 * FMUL st0,m64r - thin wrapper over the common ST0/m64 worker. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
12429
12430
/** Opcode 0xdc !11/2.
 * FCOM st0,m64r - compares ST0 with a 64-bit real memory operand; only FSW
 * is updated, no register result is stored. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no destination register for the underflow update. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12462
12463
/** Opcode 0xdc !11/3.
 * FCOMP st0,m64r - like FCOM m64r but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no destination register; stack is still popped. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12495
12496
/** Opcode 0xdc !11/4.
 * FSUB st0,m64r - thin wrapper over the common ST0/m64 worker. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}


/** Opcode 0xdc !11/5.
 * FSUBR st0,m64r - thin wrapper over the common ST0/m64 worker. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}


/** Opcode 0xdc !11/6.
 * FDIV st0,m64r - thin wrapper over the common ST0/m64 worker. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}


/** Opcode 0xdc !11/7.
 * FDIVR st0,m64r - thin wrapper over the common ST0/m64 worker. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
12527
12528
/**
 * @opcode 0xdc
 *
 * Decoder for the 0xdc FPU escape byte: register forms operate on ST(i)
 * with ST0 as source; memory forms take a 64-bit real operand with ST0 as
 * destination.
 */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Remember the FPU opcode word for the FOP updates done by the handlers. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12567
12568
/** Opcode 0xdd !11/0.
 * FLD m64r - loads a 64-bit real from memory, converts it to 80-bit and
 * pushes it onto the register stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 is the one that becomes the new top after the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12599
12600
/** Opcode 0xdd !11/1.
 * FISTTP m64i - stores ST0 to memory as a 64-bit integer using truncation,
 * then pops the register stack.
 * (Note: the old comment said !11/0; the 0xdd dispatcher routes /1 here.) */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination write-only; committed or rolled back below. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: if IM is masked, store the integer indefinite
           value; otherwise roll the mapping back and raise via FSW update. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12638
12639
/** Opcode 0xdd !11/2.
 * FST m64r - stores ST0 to memory as a 64-bit real without popping.
 * (Note: the old comment said !11/0; the 0xdd dispatcher routes /2 here.) */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination write-only; committed or rolled back below. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: store negative QNaN if IM is masked, else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12677
12678
12679
12680
/** Opcode 0xdd !11/3.
 * FSTP m64r - stores ST0 to memory as a 64-bit real, then pops the stack.
 * (Note: the old comment said !11/0; the 0xdd dispatcher routes /3 here.) */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination write-only; committed or rolled back below. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: store negative QNaN if IM is masked, else roll back;
           the stack is popped either way. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12718
12719
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte - restores the full FPU state from memory; deferred to
 * the C implementation.
 * (Note: the old comment said !11/0; the 0xdd dispatcher routes /4 here.) */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    /* Operand size selects the 94 vs 108 byte (16-/32-bit) layout. */
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
12738
12739
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte - saves the full FPU state to memory and reinitializes
 * the FPU; deferred to the C implementation.
 * (Note: the old comment said !11/0; the 0xdd dispatcher routes /6 here.) */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */

    /* Operand size selects the 94 vs 108 byte (16-/32-bit) layout. */
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
12758
/** Opcode 0xdd !11/7.
 * FNSTSW m16 - stores the FPU status word to a 16-bit memory operand.
 * (Note: the old comment said !11/0; the 0xdd dispatcher routes /7 here.) */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2, 0, 0);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
12782
12783
/** Opcode 0xdd 11/0.
 * FFREE st(i) - marks the given register as empty in the tag word. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
       unmodified. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12803
12804
/** Opcode 0xdd 11/2.
 * FST st(i) - copies ST0 into ST(i) without popping.
 * (Note: the old comment said 11/1; the 0xdd dispatcher routes /2 here.) */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Wrap the ST0 value in a result with zero FSW bits, then store. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12827
12828
/** Opcode 0xdd 11/4.
 * FUCOM st0,st(i) - unordered compare, FSW update only.
 * (Note: the old comment said 11/3; the 0xdd dispatcher routes /4 here.) */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
12835
12836
/** Opcode 0xdd 11/5.
 * FUCOMP st0,st(i) - unordered compare, FSW update, then pop.
 * (Note: the old comment said 11/4; the 0xdd dispatcher routes /5 here.) */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
12843
12844
/**
 * @opcode 0xdd
 *
 * Decoder for the 0xdd FPU escape byte: register forms cover FFREE, FST(P)
 * to registers and FUCOM(P); memory forms cover 64-bit real load/store,
 * FISTTP m64i and the FRSTOR/FNSAVE/FNSTSW state instructions.
 */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Remember the FPU opcode word for the FOP updates done by the handlers. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
            case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12883
12884
/** Opcode 0xde 11/0.
 * FADDP st(i),st0 - add and pop; wrapper over the popping STn/ST0 worker. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xde 11/1.
 * FMULP st(i),st0 - multiply and pop; wrapper over the popping STn/ST0 worker.
 * (Note: the old comment said 11/0; the 0xde dispatcher routes /1 here.) */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
12899
12900
/** Opcode 0xde 0xd9.
 * FCOMPP - compares ST0 with ST1, then pops the stack twice. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
12907
12908
/** Opcode 0xde 11/4.
 * FSUBRP st(i),st0 - reverse subtract and pop; wrapper over the popping worker. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xde 11/5.
 * FSUBP st(i),st0 - subtract and pop; wrapper over the popping worker. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xde 11/6.
 * FDIVRP st(i),st0 - reverse divide and pop; wrapper over the popping worker. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xde 11/7.
 * FDIVP st(i),st0 - divide and pop; wrapper over the popping worker. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
12939
12940
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * If ST0 is empty, the stack-underflow path updates FSW instead of storing
 * a result.
 *
 * @param   bRm         Mod R/M byte (memory form, selects the m16i operand).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the memory operand before touching the FPU state. */
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12976
12977
/** Opcode 0xde !11/0.
 * FIADD m16i - thin wrapper over the common ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}


/** Opcode 0xde !11/1.
 * FIMUL m16i - thin wrapper over the common ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
12992
12993
/** Opcode 0xde !11/2.
 * FICOM st0,m16i - compares ST0 with a 16-bit integer memory operand; only
 * FSW is updated, no register result is stored. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no destination register for the underflow update. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13025
13026
/** Opcode 0xde !11/3.
 * FICOMP st0,m16i - like FICOM m16i but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no destination register; stack is still popped. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13058
13059
/** Opcode 0xde !11/4.
 * FISUB m16i - thin wrapper over the common ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}


/** Opcode 0xde !11/5.
 * FISUBR m16i - thin wrapper over the common ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}


/** Opcode 0xde !11/6.
 * FIDIV m16i - thin wrapper over the common ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}


/** Opcode 0xde !11/7.
 * FIDIVR m16i - thin wrapper over the common ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
13090
13091
/**
 * @opcode 0xde
 *
 * Decoder for the 0xde FPU escape byte: register forms are the popping
 * arithmetic instructions (FADDP..FDIVP) plus FCOMPP (only valid with
 * bRm == 0xd9); memory forms take a 16-bit integer operand.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Remember the FPU opcode word for the FOP updates done by the handlers. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13132
13133
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp:
 * marks ST(i) empty, then increments the FPU stack top pointer. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm)); /* tag ST(i) as empty */
    IEM_MC_FPU_STACK_INC_TOP();                     /* the 'pop' part */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13153
13154
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: copies the FPU status word into AX without checking for
 * pending FPU exceptions (the no-wait form). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13169
13170
13171/** Opcode 0xdf 11/5. */
13172FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
13173{
13174 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
13175 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
13176 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
13177 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
13178}
13179
13180
/** Opcode 0xdf 11/6.
 * FCOMIP ST(0),ST(i): ordered compare setting ZF/PF/CF, then pop.
 * fUCmp=false selects the ordered (FCOMI-style) comparison; bit 31 of
 * the last argument requests the pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
13189
13190
/** Opcode 0xdf !11/0.
 * FILD m16i: loads a signed 16-bit integer from memory, converts it to
 * 80-bit real and pushes it onto the FPU stack; stack overflow is raised
 * if the incoming top-of-stack slot (ST(7) after push) is not empty. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,            FpuRes);
    IEM_MC_LOCAL(int16_t,                 i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,   pFpuRes,     FpuRes,  0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val,     i16Val,  1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) { /* room to push? */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13221
13222
/** Opcode 0xdf !11/1.
 * FISTTP m16i (SSE3): stores ST(0) to memory as a signed 16-bit integer
 * using truncation (round toward zero) regardless of RC, then pops.
 * On stack underflow with the invalid-op exception masked (FCW.IM), the
 * integer-indefinite value is stored instead; otherwise the write is
 * rolled back. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination before knowing the outcome; committed or rolled
       back below depending on the FPU-stack / FCW.IM state. */
    IEM_MC_LOCAL(uint8_t,                 bUnmapInfo);
    IEM_MC_ARG(int16_t *,                 pi16Dst,    1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,  2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,            u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,  pu16Fsw,    u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13260
13261
/** Opcode 0xdf !11/2.
 * FIST m16i: stores ST(0) to memory as a signed 16-bit integer using the
 * current rounding mode; does NOT pop (contrast fistp below).  On stack
 * underflow with FCW.IM set, stores the integer-indefinite value; else
 * the mapped write is rolled back. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t,                 bUnmapInfo);
    IEM_MC_ARG(int16_t *,                 pi16Dst,    1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,  2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,            u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,  pu16Fsw,    u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode); /* no pop */
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13299
13300
/** Opcode 0xdf !11/3.
 * FISTP m16i: same as FIST m16i (rounded store of ST(0) as int16) but pops
 * the FPU stack afterwards — note the *_THEN_POP FSW/underflow updaters. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t,                 bUnmapInfo);
    IEM_MC_ARG(int16_t *,                 pi16Dst,    1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,  2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,            u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,  pu16Fsw,    u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13338
13339
/** Opcode 0xdf !11/4.
 * FBLD m80bcd: loads an 80-bit packed BCD value from memory, converts it
 * to 80-bit real and pushes it onto the FPU stack; raises push overflow
 * if the incoming slot is occupied. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,            FpuRes);
    IEM_MC_LOCAL(RTPBCD80U,               d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,   pFpuRes,     FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U,     pd80Val,     d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) { /* room to push? */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13370
13371
/** Opcode 0xdf !11/5.
 * FILD m64i: loads a signed 64-bit integer from memory, converts it to
 * 80-bit real and pushes it onto the FPU stack; push overflow if the
 * incoming slot is occupied. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,            FpuRes);
    IEM_MC_LOCAL(int64_t,                 i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,   pFpuRes,     FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val,     i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) { /* room to push? */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13402
13403
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd: stores ST(0) to memory as 80-bit packed BCD and pops.
 * On stack underflow with FCW.IM set, stores the BCD indefinite value;
 * otherwise the mapped write is rolled back. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t,                 bUnmapInfo);
    IEM_MC_ARG(PRTPBCD80U,                pd80Dst,    1);
    IEM_MC_MEM_MAP_D80_WO(pd80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,  2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,            u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,  pu16Fsw,    u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst); /* BCD indefinite */
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13441
13442
/** Opcode 0xdf !11/7.
 * FISTP m64i: stores ST(0) to memory as a signed 64-bit integer using the
 * current rounding mode, then pops.  On stack underflow with FCW.IM set,
 * stores the 64-bit integer-indefinite value; else the write is rolled
 * back. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t,                 bUnmapInfo);
    IEM_MC_ARG(int64_t *,                 pi64Dst,    1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,  2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,            u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,  pu16Fsw,    u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13480
13481
/**
 * @opcode 0xdf
 *
 * FPU escape byte 0xdf decoder.  Records the FPU opcode word and dispatches
 * on the ModR/M reg field; several register-form slots are reserved but have
 * de facto behaviour noted inline.
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Low 11 bits of the FPU opcode register: low 3 bits of 0xdf + ModR/M. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* FNSTSW AX is the single encoding DF E0. */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms: 16/64-bit integer and packed BCD loads/stores. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13522
13523
/**
 * @opcode 0xe0
 * @opfltest zf
 *
 * LOOPNE/LOOPNZ Jb: decrements xCX and takes the short branch when the
 * counter (post-decrement) is non-zero AND ZF is clear.  The counter
 * register width follows the effective ADDRESS size, hence the three-way
 * switch; the condition macros test "not one" pre-decrement, which equals
 * "non-zero" post-decrement.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1); /* counter decrements either way */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13578
13579
/**
 * @opcode 0xe1
 * @opfltest zf
 *
 * LOOPE/LOOPZ Jb: decrements xCX and takes the short branch when the
 * counter (post-decrement) is non-zero AND ZF is set.  Mirrors loopne
 * above with the ZF condition inverted; counter width follows the
 * effective address size.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1); /* counter decrements either way */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13634
13635
/**
 * @opcode 0xe2
 *
 * LOOP Jb: decrements xCX and takes the short branch while the counter
 * (post-decrement) is non-zero.  Includes a logging-only shortcut for the
 * self-targeting "LOOP $-2" busy-wait pattern.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */

    /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
     * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs.  Shortcutting
     * the loop causes guest crashes, but when logging it's nice to skip a few million
     * lines of useless output. */
#if defined(LOG_ENABLED)
    /* Only when verbose logging is on and the branch targets the LOOP
       instruction itself: zero the counter and fall straight through. */
    if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
#endif

    /* Regular path: counter width follows the effective address size. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NOT_ONE() {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0); /* was 1, decrement == store 0 */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NOT_ONE() {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NOT_ONE() {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13729
13730
/**
 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ Jb: takes the short branch when the counter register is
 * zero (register width follows the effective address size).  Note the
 * inverted structure: the NZ case advances RIP, the ELSE (zero) case jumps.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13778
13779
/**
 * @opcode 0xe4
 * @opfltest iopl
 *
 * IN AL,Ib: 1-byte port input from the immediate port; deferred to the
 * iemCImpl_in C implementation (writes xAX, may VM-exit).  The 0x80 flag
 * in the last argument marks the port as an immediate operand.
 */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
13792
13793
/**
 * @opcode 0xe5
 * @opfltest iopl
 *
 * IN eAX,Ib: word/dword port input from the immediate port; access width
 * (2 or 4 bytes) follows the effective operand size.
 */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
13807
13808
/**
 * @opcode 0xe6
 * @opfltest iopl
 *
 * OUT Ib,AL: 1-byte port output to the immediate port; deferred to the
 * iemCImpl_out C implementation (may VM-exit).
 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
13821
13822
/**
 * @opcode 0xe7
 * @opfltest iopl
 *
 * OUT Ib,eAX: word/dword port output to the immediate port; access width
 * (2 or 4 bytes) follows the effective operand size.
 */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
13836
13837
/**
 * @opcode 0xe8
 *
 * CALL Jv: near relative call.  Immediate width follows the effective
 * operand size; in 64-bit mode the rel32 immediate is sign-extended to
 * 64 bits.  Deferred to the rel-call C implementations (push return
 * address + branch).
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* rel32 sign-extended */
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13871
13872
/**
 * @opcode 0xe9
 *
 * JMP Jv: near relative jump.  16-bit operand size uses a rel16
 * displacement; 32-bit and 64-bit both use rel32 (hence the shared
 * case label).
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT: /* 64-bit mode also uses a rel32 displacement */
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13902
13903
/**
 * @opcode 0xea
 *
 * JMP Ap: direct far jump (ptr16:16 / ptr16:32).  Invalid in 64-bit mode.
 * The offset is decoded at 16 or 32 bits per the effective operand size,
 * then the selector:offset pair is handed to the far-jump C implementation.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
                                iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
13925
13926
/**
 * @opcode 0xeb
 *
 * JMP Jb: short (rel8) unconditional jump.
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    IEM_MC_END();
}
13941
13942
/**
 * @opcode 0xec
 * @opfltest iopl
 *
 * IN AL,DX: 1-byte port input from the port in DX; deferred to the
 * iemCImpl_in_eAX_DX C implementation (writes xAX, may VM-exit).
 */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
13955
13956
/**
 * @opcode 0xed
 * @opfltest iopl
 *
 * IN eAX,DX: word/dword port input from the port in DX; access width
 * (2 or 4 bytes) follows the effective operand size.
 */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
13970
13971
/**
 * @opcode 0xee
 * @opfltest iopl
 *
 * OUT DX,AL: 1-byte port output to the port in DX; deferred to the
 * iemCImpl_out_DX_eAX C implementation (may VM-exit).
 */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
13983
13984
/**
 * @opcode 0xef
 * @opfltest iopl
 *
 * OUT DX,eAX: word/dword port output to the port in DX; access width
 * (2 or 4 bytes) follows the effective operand size.
 */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
13997
13998
/**
 * @opcode 0xf0
 *
 * LOCK prefix: records the prefix in fPrefixes, then recursively decodes
 * the next opcode byte via the one-byte opcode map.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
14010
14011
/**
 * @opcode 0xf1
 *
 * INT1/ICEBP: raises \#DB through the generic software-interrupt C
 * implementation with the IEMINT_INT1 marker.
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    /** @todo Does not generate \#UD on 286, or so they say...  Was allegedly a
     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
     * LOADALL memo.  Needs some testing. */
    IEMOP_HLP_MIN_386();
    /** @todo testcase! */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
}
14027
14028
/**
 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix: records the prefix (clearing any prior REPE), sets
 * the 4-entry-table prefix index, then recursively decodes the next
 * opcode byte.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
14046
14047
14048/**
14049 * @opcode 0xf3
14050 */
14051FNIEMOP_DEF(iemOp_repe)
14052{
14053 /* This overrides any previous REPNE prefix. */
14054 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
14055 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
14056 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
14057
14058 /* For the 4 entry opcode tables, REPNZ overrides any previous
14059 REPNZ and operand size prefixes. */
14060 pVCpu->iem.s.idxPrefix = 2;
14061
14062 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
14063 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
14064}
14065
14066
14067/**
14068 * @opcode 0xf4
14069 */
14070FNIEMOP_DEF(iemOp_hlt)
14071{
14072 IEMOP_MNEMONIC(hlt, "hlt");
14073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14074 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_hlt);
14075}
14076
14077
14078/**
14079 * @opcode 0xf5
14080 * @opflmodify cf
14081 */
14082FNIEMOP_DEF(iemOp_cmc)
14083{
14084 IEMOP_MNEMONIC(cmc, "cmc");
14085 IEM_MC_BEGIN(0, 0, 0, 0);
14086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14087 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
14088 IEM_MC_ADVANCE_RIP_AND_FINISH();
14089 IEM_MC_END();
14090}
14091
14092
14093/**
14094 * Body for of 'inc/dec/not/neg Eb'.
14095 */
14096#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
14097 if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
14098 { \
14099 /* register access */ \
14100 IEM_MC_BEGIN(2, 0, 0, 0); \
14101 IEMOP_HLP_DONE_DECODING(); \
14102 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
14103 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
14104 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
14105 IEM_MC_REF_EFLAGS(pEFlags); \
14106 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
14107 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14108 IEM_MC_END(); \
14109 } \
14110 else \
14111 { \
14112 /* memory access. */ \
14113 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
14114 { \
14115 IEM_MC_BEGIN(2, 2, 0, 0); \
14116 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
14117 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14118 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14119 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14120 \
14121 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
14122 IEMOP_HLP_DONE_DECODING(); \
14123 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14124 IEM_MC_FETCH_EFLAGS(EFlags); \
14125 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
14126 \
14127 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
14128 IEM_MC_COMMIT_EFLAGS(EFlags); \
14129 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14130 IEM_MC_END(); \
14131 } \
14132 else \
14133 { \
14134 IEM_MC_BEGIN(2, 2, 0, 0); \
14135 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
14136 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14137 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14138 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14139 \
14140 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
14141 IEMOP_HLP_DONE_DECODING(); \
14142 IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14143 IEM_MC_FETCH_EFLAGS(EFlags); \
14144 IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
14145 \
14146 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
14147 IEM_MC_COMMIT_EFLAGS(EFlags); \
14148 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14149 IEM_MC_END(); \
14150 } \
14151 } \
14152 (void)0
14153
14154
14155/**
14156 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
14157 */
14158#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
14159 if (IEM_IS_MODRM_REG_MODE(bRm)) \
14160 { \
14161 /* \
14162 * Register target \
14163 */ \
14164 switch (pVCpu->iem.s.enmEffOpSize) \
14165 { \
14166 case IEMMODE_16BIT: \
14167 IEM_MC_BEGIN(2, 0, 0, 0); \
14168 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14169 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
14170 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
14171 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14172 IEM_MC_REF_EFLAGS(pEFlags); \
14173 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
14174 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14175 IEM_MC_END(); \
14176 break; \
14177 \
14178 case IEMMODE_32BIT: \
14179 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0); \
14180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14181 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
14182 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
14183 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14184 IEM_MC_REF_EFLAGS(pEFlags); \
14185 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
14186 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
14187 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14188 IEM_MC_END(); \
14189 break; \
14190 \
14191 case IEMMODE_64BIT: \
14192 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0); \
14193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14194 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
14195 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
14196 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14197 IEM_MC_REF_EFLAGS(pEFlags); \
14198 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
14199 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14200 IEM_MC_END(); \
14201 break; \
14202 \
14203 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
14204 } \
14205 } \
14206 else \
14207 { \
14208 /* \
14209 * Memory target. \
14210 */ \
14211 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
14212 { \
14213 switch (pVCpu->iem.s.enmEffOpSize) \
14214 { \
14215 case IEMMODE_16BIT: \
14216 IEM_MC_BEGIN(2, 3, 0, 0); \
14217 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
14218 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14219 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14220 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14221 \
14222 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14223 IEMOP_HLP_DONE_DECODING(); \
14224 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14225 IEM_MC_FETCH_EFLAGS(EFlags); \
14226 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
14227 \
14228 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
14229 IEM_MC_COMMIT_EFLAGS(EFlags); \
14230 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14231 IEM_MC_END(); \
14232 break; \
14233 \
14234 case IEMMODE_32BIT: \
14235 IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
14236 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
14237 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14238 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14239 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14240 \
14241 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14242 IEMOP_HLP_DONE_DECODING(); \
14243 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14244 IEM_MC_FETCH_EFLAGS(EFlags); \
14245 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
14246 \
14247 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
14248 IEM_MC_COMMIT_EFLAGS(EFlags); \
14249 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14250 IEM_MC_END(); \
14251 break; \
14252 \
14253 case IEMMODE_64BIT: \
14254 IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
14255 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
14256 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14257 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14258 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14259 \
14260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14261 IEMOP_HLP_DONE_DECODING(); \
14262 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14263 IEM_MC_FETCH_EFLAGS(EFlags); \
14264 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
14265 \
14266 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
14267 IEM_MC_COMMIT_EFLAGS(EFlags); \
14268 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14269 IEM_MC_END(); \
14270 break; \
14271 \
14272 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
14273 } \
14274 } \
14275 else \
14276 { \
14277 (void)0
14278
/**
 * Locked memory-target continuation of IEMOP_BODY_UNARY_Ev.
 *
 * Maps the operand atomically and invokes the locked workers for all three
 * operand sizes, then closes the braces left open by IEMOP_BODY_UNARY_Ev.
 *
 * @param   a_fnLockedU16   Locked 16-bit worker.
 * @param   a_fnLockedU32   Locked 32-bit worker.
 * @param   a_fnLockedU64   Locked 64-bit worker.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
14344
14345
14346/**
14347 * @opmaps grp3_f6
14348 * @opcode /0
14349 * @opflclass logical
14350 * @todo also /1
14351 */
14352FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
14353{
14354 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
14355 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
14356
14357 if (IEM_IS_MODRM_REG_MODE(bRm))
14358 {
14359 /* register access */
14360 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
14361 IEM_MC_BEGIN(3, 0, 0, 0);
14362 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14363 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
14364 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
14365 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14366 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
14367 IEM_MC_REF_EFLAGS(pEFlags);
14368 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
14369 IEM_MC_ADVANCE_RIP_AND_FINISH();
14370 IEM_MC_END();
14371 }
14372 else
14373 {
14374 /* memory access. */
14375 IEM_MC_BEGIN(3, 3, 0, 0);
14376 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14377 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
14378
14379 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
14380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14381
14382 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
14383 IEM_MC_ARG(uint8_t const *, pu8Dst, 0);
14384 IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14385
14386 IEM_MC_ARG_CONST(uint8_t, u8Src, u8Imm, 1);
14387 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
14388 IEM_MC_FETCH_EFLAGS(EFlags);
14389 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
14390
14391 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
14392 IEM_MC_COMMIT_EFLAGS(EFlags);
14393 IEM_MC_ADVANCE_RIP_AND_FINISH();
14394 IEM_MC_END();
14395 }
14396}
14397
14398
/* Body for opcode 0xf6 variations /4, /5, /6 and /7 (mul/imul/div/idiv Eb).
   The 8-bit worker operates on the whole of AX; a non-zero return code from
   the worker raises \#DE (divide error). */
#define IEMOP_GRP3_MUL_DIV_EB(bRm, a_pfnU8Expr) \
    PFNIEMAIMPLMULDIVU8 const pfnU8 = (a_pfnU8Expr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register access */ \
        IEM_MC_BEGIN(3, 1, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint16_t *,  pu16AX,  0); \
        IEM_MC_ARG(uint8_t,     u8Value, 1); \
        IEM_MC_ARG(uint32_t *,  pEFlags, 2); \
        IEM_MC_LOCAL(int32_t,   rc); \
        \
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags); \
        IEM_MC_IF_LOCAL_IS_Z(rc) { \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        } IEM_MC_ELSE() { \
            IEM_MC_RAISE_DIVIDE_ERROR(); \
        } IEM_MC_ENDIF(); \
        \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        IEM_MC_BEGIN(3, 2, 0, 0); \
        IEM_MC_ARG(uint16_t *,  pu16AX,  0); \
        IEM_MC_ARG(uint8_t,     u8Value, 1); \
        IEM_MC_ARG(uint32_t *,  pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
        IEM_MC_LOCAL(int32_t,   rc); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags); \
        IEM_MC_IF_LOCAL_IS_Z(rc) { \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        } IEM_MC_ELSE() { \
            IEM_MC_RAISE_DIVIDE_ERROR(); \
        } IEM_MC_ENDIF(); \
        \
        IEM_MC_END(); \
    } (void)0
14448
14449
/* Body for opcode 0xf7 variant /4, /5, /6 and /7 (mul/imul/div/idiv Ev).
   The workers take AX/EAX/RAX and DX/EDX/RDX by reference plus the operand
   value; a non-zero return code raises \#DE (divide error).  In the 32-bit
   success path the upper halves of RAX/RDX are explicitly cleared. */
#define IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, a_pImplExpr) \
    PCIEMOPMULDIVSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register access */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(4, 1, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,  pu16AX,   0); \
                IEM_MC_ARG(uint16_t *,  pu16DX,   1); \
                IEM_MC_ARG(uint16_t,    u16Value, 2); \
                IEM_MC_ARG(uint32_t *,  pEFlags,  3); \
                IEM_MC_LOCAL(int32_t,   rc); \
                \
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(4, 1, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,  pu32AX,   0); \
                IEM_MC_ARG(uint32_t *,  pu32DX,   1); \
                IEM_MC_ARG(uint32_t,    u32Value, 2); \
                IEM_MC_ARG(uint32_t *,  pEFlags,  3); \
                IEM_MC_LOCAL(int32_t,   rc); \
                \
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,  pu64AX,   0); \
                IEM_MC_ARG(uint64_t *,  pu64DX,   1); \
                IEM_MC_ARG(uint64_t,    u64Value, 2); \
                IEM_MC_ARG(uint32_t *,  pEFlags,  3); \
                IEM_MC_LOCAL(int32_t,   rc); \
                \
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory access. */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(4, 2, 0, 0); \
                IEM_MC_ARG(uint16_t *,  pu16AX,   0); \
                IEM_MC_ARG(uint16_t *,  pu16DX,   1); \
                IEM_MC_ARG(uint16_t,    u16Value, 2); \
                IEM_MC_ARG(uint32_t *,  pEFlags,  3); \
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                IEM_MC_LOCAL(int32_t,   rc); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(4, 2, IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *,  pu32AX,   0); \
                IEM_MC_ARG(uint32_t *,  pu32DX,   1); \
                IEM_MC_ARG(uint32_t,    u32Value, 2); \
                IEM_MC_ARG(uint32_t *,  pEFlags,  3); \
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                IEM_MC_LOCAL(int32_t,   rc); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(4, 2, IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *,  pu64AX,   0); \
                IEM_MC_ARG(uint64_t *,  pu64DX,   1); \
                IEM_MC_ARG(uint64_t,    u64Value, 2); \
                IEM_MC_ARG(uint32_t *,  pEFlags,  3); \
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                IEM_MC_LOCAL(int32_t,   rc); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
14617
14618
14619/**
14620 * @opmaps grp3_f6
14621 * @opcode /2
14622 * @opflclass unchanged
14623 */
14624FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
14625{
14626/** @todo does not modify EFLAGS. */
14627 IEMOP_MNEMONIC(not_Eb, "not Eb");
14628 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
14629}
14630
14631
14632/**
14633 * @opmaps grp3_f6
14634 * @opcode /3
14635 * @opflclass arithmetic
14636 */
14637FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
14638{
14639 IEMOP_MNEMONIC(net_Eb, "neg Eb");
14640 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
14641}
14642
14643
14644/**
14645 * @opcode 0xf6
14646 */
14647FNIEMOP_DEF(iemOp_Grp3_Eb)
14648{
14649 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14650 switch (IEM_GET_MODRM_REG_8(bRm))
14651 {
14652 case 0:
14653 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
14654 case 1:
14655 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
14656 case 2:
14657 return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
14658 case 3:
14659 return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
14660 case 4:
14661 {
14662 /**
14663 * @opdone
14664 * @opmaps grp3_f6
14665 * @opcode /4
14666 * @opflclass multiply
14667 */
14668 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
14669 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14670 IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
14671 break;
14672 }
14673 case 5:
14674 {
14675 /**
14676 * @opdone
14677 * @opmaps grp3_f6
14678 * @opcode /5
14679 * @opflclass multiply
14680 */
14681 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
14682 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14683 IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
14684 break;
14685 }
14686 case 6:
14687 {
14688 /**
14689 * @opdone
14690 * @opmaps grp3_f6
14691 * @opcode /6
14692 * @opflclass division
14693 */
14694 IEMOP_MNEMONIC(div_Eb, "div Eb");
14695 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14696 IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
14697 break;
14698 }
14699 case 7:
14700 {
14701 /**
14702 * @opdone
14703 * @opmaps grp3_f6
14704 * @opcode /7
14705 * @opflclass division
14706 */
14707 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
14708 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14709 IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
14710 break;
14711 }
14712 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14713 }
14714}
14715
14716
14717/**
14718 * @opmaps grp3_f7
14719 * @opcode /0
14720 * @opflclass logical
14721 */
14722FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
14723{
14724 IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
14725 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
14726
14727 if (IEM_IS_MODRM_REG_MODE(bRm))
14728 {
14729 /* register access */
14730 switch (pVCpu->iem.s.enmEffOpSize)
14731 {
14732 case IEMMODE_16BIT:
14733 IEM_MC_BEGIN(3, 0, 0, 0);
14734 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
14735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14736 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
14737 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
14738 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14739 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
14740 IEM_MC_REF_EFLAGS(pEFlags);
14741 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
14742 IEM_MC_ADVANCE_RIP_AND_FINISH();
14743 IEM_MC_END();
14744 break;
14745
14746 case IEMMODE_32BIT:
14747 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
14748 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
14749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14750 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
14751 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
14752 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14753 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
14754 IEM_MC_REF_EFLAGS(pEFlags);
14755 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
14756 /* No clearing the high dword here - test doesn't write back the result. */
14757 IEM_MC_ADVANCE_RIP_AND_FINISH();
14758 IEM_MC_END();
14759 break;
14760
14761 case IEMMODE_64BIT:
14762 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
14763 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
14764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14765 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
14766 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
14767 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14768 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
14769 IEM_MC_REF_EFLAGS(pEFlags);
14770 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
14771 IEM_MC_ADVANCE_RIP_AND_FINISH();
14772 IEM_MC_END();
14773 break;
14774
14775 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14776 }
14777 }
14778 else
14779 {
14780 /* memory access. */
14781 switch (pVCpu->iem.s.enmEffOpSize)
14782 {
14783 case IEMMODE_16BIT:
14784 IEM_MC_BEGIN(3, 3, 0, 0);
14785 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14786 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
14787
14788 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
14789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14790
14791 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
14792 IEM_MC_ARG(uint16_t const *, pu16Dst, 0);
14793 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14794
14795 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
14796 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
14797 IEM_MC_FETCH_EFLAGS(EFlags);
14798 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
14799
14800 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
14801 IEM_MC_COMMIT_EFLAGS(EFlags);
14802 IEM_MC_ADVANCE_RIP_AND_FINISH();
14803 IEM_MC_END();
14804 break;
14805
14806 case IEMMODE_32BIT:
14807 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
14808 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14809 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
14810
14811 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
14812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14813
14814 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
14815 IEM_MC_ARG(uint32_t const *, pu32Dst, 0);
14816 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14817
14818 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
14819 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
14820 IEM_MC_FETCH_EFLAGS(EFlags);
14821 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
14822
14823 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
14824 IEM_MC_COMMIT_EFLAGS(EFlags);
14825 IEM_MC_ADVANCE_RIP_AND_FINISH();
14826 IEM_MC_END();
14827 break;
14828
14829 case IEMMODE_64BIT:
14830 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
14831 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14832 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
14833
14834 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
14835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14836
14837 IEM_MC_ARG(uint64_t const *, pu64Dst, 0);
14838 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
14839 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14840
14841 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1);
14842 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
14843 IEM_MC_FETCH_EFLAGS(EFlags);
14844 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
14845
14846 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
14847 IEM_MC_COMMIT_EFLAGS(EFlags);
14848 IEM_MC_ADVANCE_RIP_AND_FINISH();
14849 IEM_MC_END();
14850 break;
14851
14852 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14853 }
14854 }
14855}
14856
14857
14858/**
14859 * @opmaps grp3_f7
14860 * @opcode /2
14861 * @opflclass unchanged
14862 */
14863FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
14864{
14865/** @todo does not modify EFLAGS */
14866 IEMOP_MNEMONIC(not_Ev, "not Ev");
14867 IEMOP_BODY_UNARY_Ev( iemAImpl_not_u16, iemAImpl_not_u32, iemAImpl_not_u64);
14868 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
14869}
14870
14871
14872/**
14873 * @opmaps grp3_f7
14874 * @opcode /3
14875 * @opflclass arithmetic
14876 */
14877FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
14878{
14879 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
14880 IEMOP_BODY_UNARY_Ev( iemAImpl_neg_u16, iemAImpl_neg_u32, iemAImpl_neg_u64);
14881 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
14882}
14883
14884
14885/**
14886 * @opmaps grp3_f7
14887 * @opcode /4
14888 * @opflclass multiply
14889 */
14890FNIEMOP_DEF_1(iemOp_grp3_mul_Ev, uint8_t, bRm)
14891{
14892 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
14893 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14894 IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
14895}
14896
14897
14898/**
14899 * @opmaps grp3_f7
14900 * @opcode /5
14901 * @opflclass multiply
14902 */
14903FNIEMOP_DEF_1(iemOp_grp3_imul_Ev, uint8_t, bRm)
14904{
14905 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
14906 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14907 IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
14908}
14909
14910
14911/**
14912 * @opmaps grp3_f7
14913 * @opcode /6
14914 * @opflclass division
14915 */
14916FNIEMOP_DEF_1(iemOp_grp3_div_Ev, uint8_t, bRm)
14917{
14918 IEMOP_MNEMONIC(div_Ev, "div Ev");
14919 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14920 IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
14921}
14922
14923
14924/**
14925 * @opmaps grp3_f7
14926 * @opcode /7
14927 * @opflclass division
14928 */
14929FNIEMOP_DEF_1(iemOp_grp3_idiv_Ev, uint8_t, bRm)
14930{
14931 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
14932 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14933 IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
14934}
14935
14936
14937/**
14938 * @opcode 0xf7
14939 */
14940FNIEMOP_DEF(iemOp_Grp3_Ev)
14941{
14942 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14943 switch (IEM_GET_MODRM_REG_8(bRm))
14944 {
14945 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
14946 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
14947 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
14948 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
14949 case 4: return FNIEMOP_CALL_1(iemOp_grp3_mul_Ev, bRm);
14950 case 5: return FNIEMOP_CALL_1(iemOp_grp3_imul_Ev, bRm);
14951 case 6: return FNIEMOP_CALL_1(iemOp_grp3_div_Ev, bRm);
14952 case 7: return FNIEMOP_CALL_1(iemOp_grp3_idiv_Ev, bRm);
14953 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14954 }
14955}
14956
14957
14958/**
14959 * @opcode 0xf8
14960 * @opflmodify cf
14961 * @opflclear cf
14962 */
14963FNIEMOP_DEF(iemOp_clc)
14964{
14965 IEMOP_MNEMONIC(clc, "clc");
14966 IEM_MC_BEGIN(0, 0, 0, 0);
14967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14968 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
14969 IEM_MC_ADVANCE_RIP_AND_FINISH();
14970 IEM_MC_END();
14971}
14972
14973
14974/**
14975 * @opcode 0xf9
14976 * @opflmodify cf
14977 * @opflset cf
14978 */
14979FNIEMOP_DEF(iemOp_stc)
14980{
14981 IEMOP_MNEMONIC(stc, "stc");
14982 IEM_MC_BEGIN(0, 0, 0, 0);
14983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14984 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
14985 IEM_MC_ADVANCE_RIP_AND_FINISH();
14986 IEM_MC_END();
14987}
14988
14989
14990/**
14991 * @opcode 0xfa
14992 * @opfltest iopl,vm
14993 * @opflmodify if,vif
14994 */
14995FNIEMOP_DEF(iemOp_cli)
14996{
14997 IEMOP_MNEMONIC(cli, "cli");
14998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14999 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, 0, iemCImpl_cli);
15000}
15001
15002
15003/**
15004 * @opcode 0xfb
15005 * @opfltest iopl,vm
15006 * @opflmodify if,vif
15007 */
15008FNIEMOP_DEF(iemOp_sti)
15009{
15010 IEMOP_MNEMONIC(sti, "sti");
15011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15012 IEM_MC_DEFER_TO_CIMPL_0_RET( IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
15013 | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, 0, iemCImpl_sti);
15014}
15015
15016
15017/**
15018 * @opcode 0xfc
15019 * @opflmodify df
15020 * @opflclear df
15021 */
15022FNIEMOP_DEF(iemOp_cld)
15023{
15024 IEMOP_MNEMONIC(cld, "cld");
15025 IEM_MC_BEGIN(0, 0, 0, 0);
15026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15027 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
15028 IEM_MC_ADVANCE_RIP_AND_FINISH();
15029 IEM_MC_END();
15030}
15031
15032
15033/**
15034 * @opcode 0xfd
15035 * @opflmodify df
15036 * @opflset df
15037 */
15038FNIEMOP_DEF(iemOp_std)
15039{
15040 IEMOP_MNEMONIC(std, "std");
15041 IEM_MC_BEGIN(0, 0, 0, 0);
15042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15043 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
15044 IEM_MC_ADVANCE_RIP_AND_FINISH();
15045 IEM_MC_END();
15046}
15047
15048
15049/**
15050 * @opmaps grp4
15051 * @opcode /0
15052 * @opflclass incdec
15053 */
15054FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
15055{
15056 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
15057 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
15058}
15059
15060
15061/**
15062 * @opmaps grp4
15063 * @opcode /1
15064 * @opflclass incdec
15065 */
15066FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
15067{
15068 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
15069 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
15070}
15071
15072
15073/**
15074 * @opcode 0xfe
15075 */
15076FNIEMOP_DEF(iemOp_Grp4)
15077{
15078 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15079 switch (IEM_GET_MODRM_REG_8(bRm))
15080 {
15081 case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
15082 case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
15083 default:
15084 /** @todo is the eff-addr decoded? */
15085 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
15086 IEMOP_RAISE_INVALID_OPCODE_RET();
15087 }
15088}
15089
15090/**
15091 * @opmaps grp5
15092 * @opcode /0
15093 * @opflclass incdec
15094 */
15095FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
15096{
15097 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
15098 IEMOP_BODY_UNARY_Ev( iemAImpl_inc_u16, iemAImpl_inc_u32, iemAImpl_inc_u64);
15099 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
15100}
15101
15102
15103/**
15104 * @opmaps grp5
15105 * @opcode /1
15106 * @opflclass incdec
15107 */
15108FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
15109{
15110 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
15111 IEMOP_BODY_UNARY_Ev( iemAImpl_dec_u16, iemAImpl_dec_u32, iemAImpl_dec_u64);
15112 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
15113}
15114
15115
15116/**
15117 * Opcode 0xff /2.
15118 * @param bRm The RM byte.
15119 */
15120FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
15121{
15122 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
15123 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
15124
15125 if (IEM_IS_MODRM_REG_MODE(bRm))
15126 {
15127 /* The new RIP is taken from a register. */
15128 switch (pVCpu->iem.s.enmEffOpSize)
15129 {
15130 case IEMMODE_16BIT:
15131 IEM_MC_BEGIN(1, 0, 0, 0);
15132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15133 IEM_MC_ARG(uint16_t, u16Target, 0);
15134 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
15135 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
15136 IEM_MC_END();
15137 break;
15138
15139 case IEMMODE_32BIT:
15140 IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
15141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15142 IEM_MC_ARG(uint32_t, u32Target, 0);
15143 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
15144 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
15145 IEM_MC_END();
15146 break;
15147
15148 case IEMMODE_64BIT:
15149 IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
15150 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15151 IEM_MC_ARG(uint64_t, u64Target, 0);
15152 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
15153 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
15154 IEM_MC_END();
15155 break;
15156
15157 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15158 }
15159 }
15160 else
15161 {
15162 /* The new RIP is taken from a register. */
15163 switch (pVCpu->iem.s.enmEffOpSize)
15164 {
15165 case IEMMODE_16BIT:
15166 IEM_MC_BEGIN(1, 1, 0, 0);
15167 IEM_MC_ARG(uint16_t, u16Target, 0);
15168 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15169 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15171 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15172 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
15173 IEM_MC_END();
15174 break;
15175
15176 case IEMMODE_32BIT:
15177 IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
15178 IEM_MC_ARG(uint32_t, u32Target, 0);
15179 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15180 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15181 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15182 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15183 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
15184 IEM_MC_END();
15185 break;
15186
15187 case IEMMODE_64BIT:
15188 IEM_MC_BEGIN(1, 1, IEM_MC_F_64BIT, 0);
15189 IEM_MC_ARG(uint64_t, u64Target, 0);
15190 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15191 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15193 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15194 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
15195 IEM_MC_END();
15196 break;
15197
15198 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15199 }
15200 }
15201}
15202
/**
 * Body for 'callf Ep' and 'jmpf Ep' (grp5 /3 and /5): loads a far pointer
 * (16-bit selector + 16/32/64-bit offset, selector stored last) from memory
 * and defers the far branch to the given C implementation.
 *
 * Register operands raise \#UD.  In 64-bit mode the default operand size is
 * 32-bit; only Intel honours a REX.W prefix here (forced back to 32-bit for
 * non-Intel guests).
 *
 * @param   a_bRm           The ModR/M byte.
 * @param   a_fnCImpl       The C implementation performing the far branch.
 * @param   a_fCImplExtra   Additional IEM_CIMPL_F_XXX flags to pass along
 *                          (e.g. IEM_CIMPL_F_BRANCH_STACK for callf).
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl, a_fCImplExtra) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1, 0, 0); \
            IEM_MC_ARG(uint16_t, u16Sel,                        0); \
            IEM_MC_ARG(uint16_t, offSeg,                        1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0); \
            IEM_MC_ARG(uint16_t, u16Sel,                        0); \
            IEM_MC_ARG(uint32_t, offSeg,                        1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0); \
            IEM_MC_ARG(uint16_t, u16Sel,                        0); \
            IEM_MC_ARG(uint64_t, offSeg,                        1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE /* no gates */, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
15271
15272
15273/**
15274 * Opcode 0xff /3.
15275 * @param bRm The RM byte.
15276 */
15277FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
15278{
15279 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
15280 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf, IEM_CIMPL_F_BRANCH_STACK);
15281}
15282
15283
15284/**
15285 * Opcode 0xff /4.
15286 * @param bRm The RM byte.
15287 */
15288FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
15289{
15290 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
15291 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
15292
15293 if (IEM_IS_MODRM_REG_MODE(bRm))
15294 {
15295 /* The new RIP is taken from a register. */
15296 switch (pVCpu->iem.s.enmEffOpSize)
15297 {
15298 case IEMMODE_16BIT:
15299 IEM_MC_BEGIN(0, 1, 0, 0);
15300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15301 IEM_MC_LOCAL(uint16_t, u16Target);
15302 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
15303 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
15304 IEM_MC_END();
15305 break;
15306
15307 case IEMMODE_32BIT:
15308 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
15309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15310 IEM_MC_LOCAL(uint32_t, u32Target);
15311 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
15312 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
15313 IEM_MC_END();
15314 break;
15315
15316 case IEMMODE_64BIT:
15317 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
15318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15319 IEM_MC_LOCAL(uint64_t, u64Target);
15320 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
15321 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
15322 IEM_MC_END();
15323 break;
15324
15325 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15326 }
15327 }
15328 else
15329 {
15330 /* The new RIP is taken from a memory location. */
15331 switch (pVCpu->iem.s.enmEffOpSize)
15332 {
15333 case IEMMODE_16BIT:
15334 IEM_MC_BEGIN(0, 2, 0, 0);
15335 IEM_MC_LOCAL(uint16_t, u16Target);
15336 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15337 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15339 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15340 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
15341 IEM_MC_END();
15342 break;
15343
15344 case IEMMODE_32BIT:
15345 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
15346 IEM_MC_LOCAL(uint32_t, u32Target);
15347 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15348 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15350 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15351 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
15352 IEM_MC_END();
15353 break;
15354
15355 case IEMMODE_64BIT:
15356 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
15357 IEM_MC_LOCAL(uint64_t, u64Target);
15358 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15359 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15361 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15362 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
15363 IEM_MC_END();
15364 break;
15365
15366 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15367 }
15368 }
15369}
15370
15371
15372/**
15373 * Opcode 0xff /5.
15374 * @param bRm The RM byte.
15375 */
15376FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
15377{
15378 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
15379 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp, 0);
15380}
15381
15382
15383/**
15384 * Opcode 0xff /6.
15385 * @param bRm The RM byte.
15386 */
15387FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
15388{
15389 IEMOP_MNEMONIC(push_Ev, "push Ev");
15390
15391 /* Registers are handled by a common worker. */
15392 if (IEM_IS_MODRM_REG_MODE(bRm))
15393 return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
15394
15395 /* Memory we do here. */
15396 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
15397 switch (pVCpu->iem.s.enmEffOpSize)
15398 {
15399 case IEMMODE_16BIT:
15400 IEM_MC_BEGIN(0, 2, 0, 0);
15401 IEM_MC_LOCAL(uint16_t, u16Src);
15402 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15403 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15405 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15406 IEM_MC_PUSH_U16(u16Src);
15407 IEM_MC_ADVANCE_RIP_AND_FINISH();
15408 IEM_MC_END();
15409 break;
15410
15411 case IEMMODE_32BIT:
15412 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
15413 IEM_MC_LOCAL(uint32_t, u32Src);
15414 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15415 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15417 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15418 IEM_MC_PUSH_U32(u32Src);
15419 IEM_MC_ADVANCE_RIP_AND_FINISH();
15420 IEM_MC_END();
15421 break;
15422
15423 case IEMMODE_64BIT:
15424 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
15425 IEM_MC_LOCAL(uint64_t, u64Src);
15426 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15427 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15429 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15430 IEM_MC_PUSH_U64(u64Src);
15431 IEM_MC_ADVANCE_RIP_AND_FINISH();
15432 IEM_MC_END();
15433 break;
15434
15435 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15436 }
15437}
15438
15439
15440/**
15441 * @opcode 0xff
15442 */
15443FNIEMOP_DEF(iemOp_Grp5)
15444{
15445 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15446 switch (IEM_GET_MODRM_REG_8(bRm))
15447 {
15448 case 0:
15449 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
15450 case 1:
15451 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
15452 case 2:
15453 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
15454 case 3:
15455 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
15456 case 4:
15457 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
15458 case 5:
15459 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
15460 case 6:
15461 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
15462 case 7:
15463 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
15464 IEMOP_RAISE_INVALID_OPCODE_RET();
15465 }
15466 AssertFailedReturn(VERR_IEM_IPE_3);
15467}
15468
15469
15470
/**
 * The one byte opcode dispatch table, indexed by the opcode byte (0x00..0xff).
 * Forward declared as extern at the top of this file.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_in_eAX_DX,        iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
15538
15539
15540/** @} */
15541
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette