VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 103756

Last change on this file since 103756 was 103744, checked in by vboxsync, 10 months ago

VMM/IEM: Implemented iemNativeEmit_adc_r_i_efl and enabled it for both hosts. bugref:10376

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 588.3 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 103744 2024-03-09 02:52:46Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
/** Dispatch table mapping each one-byte opcode (0x00..0xff) to its decoder function. */
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
/**
 * Special case body for byte instructions like SUB and XOR that can be used
 * to zero a register.
 *
 * This can be used both for the r8_rm and rm_r8 forms since it's working on the
 * same register.
 *
 * Fix: the original condition referenced a caller-scope variable @c bRm instead
 * of the macro parameter @a a_bRm, so the macro only behaved correctly when the
 * call site happened to name its ModR/M variable @c bRm.
 */
#define IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(a_bRm) \
    /* Matches only register form (mod=3) with reg == rm, including identical REX.R/REX.B bits, \
       i.e. the same register is both operands (e.g. XOR AL,AL / SUB AL,AL). */ \
    if (   (a_bRm >> X86_MODRM_REG_SHIFT) == ((a_bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT)) \
        && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB) \
    { \
        IEM_MC_BEGIN(0, 1, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        /* Result is always zero, so store the constant directly... */ \
        IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_REG(pVCpu, a_bRm), 0); \
        /* ... and set the status flags to the fixed zero-result pattern (PF+ZF set, rest cleared). */ \
        IEM_MC_LOCAL(uint32_t, fEFlags); \
        IEM_MC_FETCH_EFLAGS(fEFlags); \
        IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS); \
        IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF); \
        IEM_MC_COMMIT_EFLAGS(fEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } ((void)0)
81
/**
 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
 * memory/register as the destination.
 *
 * @param a_bRm                 The ModR/M byte.
 * @param a_InsNm               Instruction base name; used to construct the
 *                              iemAImpl_<nm>_u8[_locked] workers and the
 *                              iemNativeEmit_<nm>_r_r_efl emitter via RT_CONCAT3.
 * @param a_fRegRegNativeArchs  Host archs with a native emitter for the reg,reg form.
 * @param a_fMemRegNativeArchs  Host archs for the mem,reg form.
 *                              NOTE(review): currently unused in this body.
 */
#define IEMOP_BODY_BINARY_rm_r8_RW(a_bRm, a_InsNm, a_fRegRegNativeArchs, a_fMemRegNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
        IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
            /* Native recompiler path: operate on locals, then store result + flags. */ \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
            /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
            IEM_MC_LOCAL(uint32_t, uEFlags); \
            IEM_MC_FETCH_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, a_bRm), u8Dst); \
            IEM_MC_COMMIT_EFLAGS(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            /* Fallback: call the assembly/C worker with a register reference. */ \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            /* Plain (non-LOCK) read-modify-write of the memory destination. */ \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG(uint8_t, u8Src, 1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* LOCK prefixed: atomic mapping and the _locked worker variant. */ \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG(uint8_t, u8Src, 1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bMapInfoDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bMapInfoDst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8_locked), pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bMapInfoDst); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
166
/**
 * Body for instructions like TEST & CMP with a byte memory/registers as
 * operands.
 *
 * Read-only variant: the destination operand is never written back, so a LOCK
 * prefix is rejected (see the final else branch).
 *
 * @param a_bRm               The ModR/M byte.
 * @param a_fnNormalU8        The normal (non-locked) u8 worker function.
 * @param a_EmitterBasename   Base name for the iemNativeEmit_<nm>_r_r_efl emitter.
 * @param a_fNativeArchs      Host archs with a native emitter for the reg,reg form.
 */
#define IEMOP_BODY_BINARY_rm_r8_RO(a_bRm, a_fnNormalU8, a_EmitterBasename, a_fNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            /* Native path: only the flags are committed, no register store (RO). */ \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
            /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
            IEM_MC_LOCAL(uint32_t, uEFlags); \
            IEM_MC_FETCH_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_COMMIT_EFLAGS(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_NATIVE_IF(0) { /* native codegen disabled for the mem,reg byte form */ \
                IEM_MC_LOCAL(uint8_t, u8Dst); \
                IEM_MC_FETCH_MEM_U8(u8Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, u8SrcEmit); \
                IEM_MC_FETCH_GREG_U8(u8SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                IEM_MC_LOCAL(uint32_t, uEFlags); \
                IEM_MC_FETCH_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u8Dst, u8SrcEmit, uEFlags, 8); \
                IEM_MC_COMMIT_EFLAGS(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                /* Read-only mapping; the worker must not modify *pu8Dst. */ \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
                IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_ARG(uint8_t, u8Src, 1); \
                IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* LOCK on a read-only destination is always invalid. */ \
            /** @todo we should probably decode the address first. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
245
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination.
 *
 * The reg field is the destination, rm is the source; no write-back to memory,
 * hence no LOCK handling (lock prefix is rejected during decoding).
 *
 * @param a_bRm            The ModR/M byte.
 * @param a_InsNm          Instruction base name for RT_CONCAT3 worker/emitter names.
 * @param a_fNativeArchs   Host archs with a native emitter.
 */
#define IEMOP_BODY_BINARY_r8_rm(a_bRm, a_InsNm, a_fNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
            IEM_MC_LOCAL(uint32_t, uEFlags); \
            IEM_MC_FETCH_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, a_bRm), u8Dst); \
            IEM_MC_COMMIT_EFLAGS(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory (as the source only, so no mapping needed). \
         */ \
        IEM_MC_BEGIN(3, 1, 0, 0); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
            IEM_MC_LOCAL(uint32_t, uEFlags); \
            IEM_MC_FETCH_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, a_bRm), u8Dst); \
            IEM_MC_COMMIT_EFLAGS(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
310
/**
 * Body for byte instruction CMP with a register as the destination.
 *
 * Identical layout to IEMOP_BODY_BINARY_r8_rm except that the native path does
 * not store a result back to the destination register (read-only semantics).
 *
 * @param a_bRm            The ModR/M byte.
 * @param a_InsNm          Instruction base name for RT_CONCAT3 worker/emitter names.
 * @param a_fNativeArchs   Host archs with a native emitter.
 */
#define IEMOP_BODY_BINARY_r8_rm_RO(a_bRm, a_InsNm, a_fNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
            IEM_MC_LOCAL(uint32_t, uEFlags); \
            IEM_MC_FETCH_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_COMMIT_EFLAGS(uEFlags); /* flags only; no register store (RO) */ \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory (source operand only). \
         */ \
        IEM_MC_BEGIN(3, 1, 0, 0); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
            IEM_MC_LOCAL(uint32_t, uEFlags); \
            IEM_MC_FETCH_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_COMMIT_EFLAGS(uEFlags); /* flags only; no register store (RO) */ \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
372
373
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Note! This macro is deliberately unbalanced: it ends inside an open
 *       "else {" block which is closed by IEMOP_BODY_BINARY_rm_rv_LOCKED
 *       (split to work around an IEMAllInstPython.py parsing issue — see the
 *       comment on that macro). Always pair the two at the call site.
 *
 * @param a_bRm                 The ModR/M byte.
 * @param a_InsNm               Instruction base name for RT_CONCAT3 worker/emitter names.
 * @param a_fRegRegNativeArchs  Host archs with a native emitter for the reg,reg form.
 * @param a_fMemRegNativeArchs  Host archs for the mem,reg form.
 *                              NOTE(review): currently unused in this body.
 */
#define IEMOP_BODY_BINARY_rm_rv_RW(a_bRm, a_InsNm, a_fRegRegNativeArchs, a_fMemRegNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                    IEM_MC_LOCAL(uint16_t, u16Dst); \
                    IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, a_bRm), u16Dst); \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                    IEM_MC_LOCAL(uint32_t, u32Dst); \
                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, a_bRm), u32Dst); \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
                    /* 32-bit ops zero the upper half of the 64-bit register. */ \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                    IEM_MC_LOCAL(uint64_t, u64Dst); \
                    IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, a_bRm), u64Dst); \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * LOCK-prefixed continuation of IEMOP_BODY_BINARY_rm_rv_RW: atomic mappings
 * and the _locked worker variants.  Closes the "else {" and outer block left
 * open by the _RW macro; the two must always be used together.
 */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_bRm, a_InsNm) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16_locked), pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32_locked), pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64_locked), pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
617
/**
 * Body for read-only word/dword/qword instructions like TEST and CMP with
 * memory/register as the destination.
 *
 * The destination is never written back, so a LOCK prefix raises \#UD (see the
 * final else branch).
 *
 * @param a_bRm            The ModR/M byte.
 * @param a_InsNm          Instruction base name for RT_CONCAT3 worker/emitter names.
 * @param a_fNativeArchs   Host archs with a native emitter.
 */
#define IEMOP_BODY_BINARY_rm_rv_RO(a_bRm, a_InsNm, a_fNativeArchs) \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint16_t, u16Dst); \
                    IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint32_t, u32Dst); \
                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint64_t, u64Dst); \
                    IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        /* Native path: plain read (no mapping) since nothing is written back. */ \
                        IEM_MC_LOCAL(uint16_t, u16Dst); \
                        IEM_MC_FETCH_MEM_U16(u16Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint16_t, u16SrcEmit); \
                        IEM_MC_FETCH_GREG_U16(u16SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                        IEM_MC_LOCAL(uint32_t, uEFlags); \
                        IEM_MC_FETCH_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u16Dst, u16SrcEmit, uEFlags, 16); \
                        IEM_MC_COMMIT_EFLAGS(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                        IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG(uint16_t, u16Src, 1); \
                        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                        IEM_MC_FETCH_EFLAGS(EFlags); \
                        IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(EFlags); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint32_t, u32Dst); \
                        IEM_MC_FETCH_MEM_U32(u32Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint32_t, u32SrcEmit); \
                        IEM_MC_FETCH_GREG_U32(u32SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                        IEM_MC_LOCAL(uint32_t, uEFlags); \
                        IEM_MC_FETCH_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u32Dst, u32SrcEmit, uEFlags, 32); \
                        IEM_MC_COMMIT_EFLAGS(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                        IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG(uint32_t, u32Src, 1); \
                        IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                        IEM_MC_FETCH_EFLAGS(EFlags); \
                        IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(EFlags); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint64_t, u64Dst); \
                        IEM_MC_FETCH_MEM_U64(u64Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint64_t, u64SrcEmit); \
                        IEM_MC_FETCH_GREG_U64(u64SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                        IEM_MC_LOCAL(uint32_t, uEFlags); \
                        IEM_MC_FETCH_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u64Dst, u64SrcEmit, uEFlags, 64); \
                        IEM_MC_COMMIT_EFLAGS(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                        IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG(uint64_t, u64Src, 1); \
                        IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                        IEM_MC_FETCH_EFLAGS(EFlags); \
                        IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(EFlags); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* LOCK on a read-only destination is always invalid. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
819
820
821/**
822 * Body for instructions like ADD, AND, OR, ++ with working on AL with
823 * a byte immediate.
824 */
#define IEMOP_BODY_BINARY_AL_Ib(a_InsNm, a_fNativeArchs) \
    IEM_MC_BEGIN(3, 3, 0, 0); \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* decode the byte immediate */ \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_NATIVE_IF(a_fNativeArchs) { /* host has a native emitter for this op */ \
        IEM_MC_LOCAL(uint8_t, u8Dst); \
        IEM_MC_FETCH_GREG_U8(u8Dst, X86_GREG_xAX); \
        IEM_MC_LOCAL(uint32_t, uEFlags); \
        IEM_MC_FETCH_EFLAGS(uEFlags); \
        /* args: dst, imm, efl, operand bits, immediate bits */ \
        IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u8Dst, u8Imm, uEFlags, 8, 8); \
        IEM_MC_COMMIT_EFLAGS(uEFlags); /* EFLAGS committed before the result store, as in the other native paths */ \
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Dst); \
    } IEM_MC_NATIVE_ELSE() { /* fall back to the C/assembly helper */ \
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); /* helper updates AL in place */ \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
    } IEM_MC_NATIVE_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
847
848/**
849 * Body for instructions like ADD, AND, OR, ++ with working on
850 * AX/EAX/RAX with a word/dword immediate.
851 */
#define IEMOP_BODY_BINARY_rAX_Iz_RW(a_InsNm, a_fNativeArchs) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            IEM_MC_BEGIN(3, 2, 0, 0); \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); /* Iz is 16-bit with o16 */ \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                IEM_MC_LOCAL(uint16_t, u16Dst); \
                IEM_MC_FETCH_GREG_U16(u16Dst, X86_GREG_xAX); \
                IEM_MC_LOCAL(uint32_t, uEFlags); \
                IEM_MC_FETCH_EFLAGS(uEFlags); \
                /* args: dst, imm, efl, operand bits, immediate bits */ \
                IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
                IEM_MC_COMMIT_EFLAGS(uEFlags); \
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Dst); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        /* NOTE(review): no 'break' after IEM_MC_END() in these cases, unlike the
           IEMOP_BODY_BINARY_rm_rv_* bodies; presumably IEM_MC_ADVANCE_RIP_AND_FINISH()
           returns so the fall edge is unreachable -- confirm. */ \
        \
        case IEMMODE_32BIT: \
        { \
            IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0); \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                IEM_MC_LOCAL(uint32_t, u32Dst); \
                IEM_MC_FETCH_GREG_U32(u32Dst, X86_GREG_xAX); \
                IEM_MC_LOCAL(uint32_t, uEFlags); \
                IEM_MC_FETCH_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
                IEM_MC_COMMIT_EFLAGS(uEFlags); \
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Dst); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
                /* writing via the reference bypasses the implicit high-dword zeroing
                   a 32-bit GPR store does, so clear bits 63:32 explicitly here */ \
                IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0); \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* Iz is a sign-extended 32-bit imm in 64-bit mode */ \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                IEM_MC_LOCAL(uint64_t, u64Dst); \
                IEM_MC_FETCH_GREG_U64(u64Dst, X86_GREG_xAX); \
                IEM_MC_LOCAL(uint32_t, uEFlags); \
                IEM_MC_FETCH_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
                IEM_MC_COMMIT_EFLAGS(uEFlags); \
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Dst); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
934
935/**
936 * Body for the instructions CMP and TEST working on AX/EAX/RAX with a
937 * word/dword immediate.
938 */
#define IEMOP_BODY_BINARY_rAX_Iz_RO(a_InsNm, a_fNativeArchs) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            IEM_MC_BEGIN(3, 2, 0, 0); \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                IEM_MC_LOCAL(uint16_t, u16Dst); \
                IEM_MC_FETCH_GREG_U16(u16Dst, X86_GREG_xAX); \
                IEM_MC_LOCAL(uint32_t, uEFlags); \
                IEM_MC_FETCH_EFLAGS(uEFlags); \
                /* args: dst, imm, efl, operand bits, immediate bits */ \
                IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
                IEM_MC_COMMIT_EFLAGS(uEFlags); /* only EFLAGS are written; rAX is untouched (CMP/TEST) */ \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1); \
                IEM_MC_ARG(uint16_t const *,pu16Dst, 0); /* const ref: destination is read-only */ \
                IEM_MC_REF_GREG_U16_CONST(pu16Dst, X86_GREG_xAX); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0); \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                IEM_MC_LOCAL(uint32_t, u32Dst); \
                IEM_MC_FETCH_GREG_U32(u32Dst, X86_GREG_xAX); \
                IEM_MC_LOCAL(uint32_t, uEFlags); \
                IEM_MC_FETCH_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
                IEM_MC_COMMIT_EFLAGS(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1); \
                IEM_MC_ARG(uint32_t const *,pu32Dst, 0); \
                IEM_MC_REF_GREG_U32_CONST(pu32Dst, X86_GREG_xAX); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0); \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* Iz is a sign-extended 32-bit imm in 64-bit mode */ \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                IEM_MC_LOCAL(uint64_t, u64Dst); \
                IEM_MC_FETCH_GREG_U64(u64Dst, X86_GREG_xAX); \
                IEM_MC_LOCAL(uint32_t, uEFlags); \
                IEM_MC_FETCH_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
                IEM_MC_COMMIT_EFLAGS(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1); \
                IEM_MC_ARG(uint64_t const *,pu64Dst, 0); \
                IEM_MC_REF_GREG_U64_CONST(pu64Dst, X86_GREG_xAX); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
1017
1018
1019
1020/* Instruction specification format - work in progress: */
1021
1022/**
1023 * @opcode 0x00
1024 * @opmnemonic add
1025 * @op1 rm:Eb
1026 * @op2 reg:Gb
1027 * @opmaps one
1028 * @openc ModR/M
1029 * @opflclass arithmetic
1030 * @ophints harmless ignores_op_sizes
1031 * @opstats add_Eb_Gb
1032 * @opgroup og_gen_arith_bin
1033 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1034 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
1035 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
1036 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
1037 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* fetch the ModR/M byte */
    IEMOP_BODY_BINARY_rm_r8_RW(bRm, add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0); /* native emitter on both hosts */
}
1044
1045
1046/**
1047 * @opcode 0x01
1048 * @opgroup og_gen_arith_bin
1049 * @opflclass arithmetic
1050 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1051 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
1052 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
1053 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
1054 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_rv_RW( bRm, add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0); /* regular forms */
    IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, add); /* LOCK-prefixed memory form */
}
1062
1063
1064/**
1065 * @opcode 0x02
1066 * @opgroup og_gen_arith_bin
1067 * @opflclass arithmetic
1068 * @opcopytests iemOp_add_Eb_Gb
1069 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_r8_rm(bRm, add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64); /* register destination, so no LOCK variant */
}
1076
1077
1078/**
1079 * @opcode 0x03
1080 * @opgroup og_gen_arith_bin
1081 * @opflclass arithmetic
1082 * @opcopytests iemOp_add_Ev_Gv
1083 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* helper table passed explicitly for the fallback path; 'add' selects the native emitter */
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 0, add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1090
1091
1092/**
1093 * @opcode 0x04
1094 * @opgroup og_gen_arith_bin
1095 * @opflclass arithmetic
1096 * @opcopytests iemOp_add_Eb_Gb
1097 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64); /* native emitter on both hosts */
}
1103
1104
1105/**
1106 * @opcode 0x05
1107 * @opgroup og_gen_arith_bin
1108 * @opflclass arithmetic
1109 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
1110 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
1111 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
1112 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
1113 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz_RW(add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64); /* native emitter on both hosts */
}
1119
1120
1121/**
1122 * @opcode 0x06
1123 * @opgroup og_stack_sreg
1124 */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* opcode 0x06 is invalid in 64-bit mode */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
1131
1132
1133/**
1134 * @opcode 0x07
1135 * @opgroup og_stack_sreg
1136 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* opcode 0x07 is invalid in 64-bit mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Defer to the C implementation; the mask presumably lists the guest state
       the call may dirty (rSP + all of ES) -- see IEM_MC_DEFER_TO_CIMPL_2_RET. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES),
                                iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
1150
1151
1152/**
1153 * @opcode 0x08
1154 * @opgroup og_gen_arith_bin
1155 * @opflclass logical
1156 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
1157 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
1158 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
1159 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
1160 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined for OR */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_r8_RW(bRm, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
1168
1169
/**
1171 * @opcode 0x09
1172 * @opgroup og_gen_arith_bin
1173 * @opflclass logical
1174 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
1175 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
1176 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1177 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1178 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1179 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1180 * @note AF is documented as undefined, but both modern AMD and Intel CPUs clears it.
1181 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined for OR */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_rv_RW( bRm, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0); /* regular forms */
    IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, or); /* LOCK-prefixed memory form */
}
1190
1191
1192/**
1193 * @opcode 0x0a
1194 * @opgroup og_gen_arith_bin
1195 * @opflclass logical
1196 * @opcopytests iemOp_or_Eb_Gb
1197 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined for OR */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_r8_rm(bRm, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1205
1206
1207/**
1208 * @opcode 0x0b
1209 * @opgroup og_gen_arith_bin
1210 * @opflclass logical
1211 * @opcopytests iemOp_or_Ev_Gv
1212 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined for OR */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 0, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1220
1221
1222/**
1223 * @opcode 0x0c
1224 * @opgroup og_gen_arith_bin
1225 * @opflclass logical
1226 * @opcopytests iemOp_or_Eb_Gb
1227 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined for OR */
    IEMOP_BODY_BINARY_AL_Ib(or, 0); /* 0: no native r_i emitter wired up for OR here */
}
1234
1235
1236/**
1237 * @opcode 0x0d
1238 * @opgroup og_gen_arith_bin
1239 * @opflclass logical
1240 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
1241 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
1242 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1243 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1244 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1245 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1246 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
1247 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined for OR */
    IEMOP_BODY_BINARY_rAX_Iz_RW(or, 0); /* 0: no native r_i emitter wired up for OR here */
}
1254
1255
1256/**
1257 * @opcode 0x0e
1258 * @opgroup og_stack_sreg
1259 */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* opcode 0x0e is invalid in 64-bit mode */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
1266
1267
1268/**
1269 * @opcode 0x0f
1270 * @opmnemonic EscTwo0f
1271 * @openc two0f
1272 * @opdisenum OP_2B_ESC
1273 * @ophints harmless
1274 * @opgroup og_escapes
1275 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        /* The two-byte table holds 4 entries per opcode, selected by the active
           prefix index (cf. the 0..3 variants asserted above). */
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_CS),
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
1313
1314/**
1315 * @opcode 0x10
1316 * @opgroup og_gen_arith_bin
1317 * @opflclass arithmetic_carry
1318 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1319 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1320 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1321 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1322 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1323 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_r8_RW(bRm, adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0); /* carry-in handled by the adc emitters/helpers */
}
1330
1331
1332/**
1333 * @opcode 0x11
1334 * @opgroup og_gen_arith_bin
1335 * @opflclass arithmetic_carry
1336 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1337 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1338 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1339 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1340 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1341 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_rv_RW( bRm, adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0); /* regular forms */
    IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, adc); /* LOCK-prefixed memory form */
}
1349
1350
1351/**
1352 * @opcode 0x12
1353 * @opgroup og_gen_arith_bin
1354 * @opflclass arithmetic_carry
1355 * @opcopytests iemOp_adc_Eb_Gb
1356 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_r8_rm(bRm, adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64); /* register destination, so no LOCK variant */
}
1363
1364
1365/**
1366 * @opcode 0x13
1367 * @opgroup og_gen_arith_bin
1368 * @opflclass arithmetic_carry
1369 * @opcopytests iemOp_adc_Ev_Gv
1370 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 0, adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1377
1378
1379/**
1380 * @opcode 0x14
1381 * @opgroup og_gen_arith_bin
1382 * @opflclass arithmetic_carry
1383 * @opcopytests iemOp_adc_Eb_Gb
1384 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64); /* native adc_r_i emitter enabled on both hosts */
}
1390
1391
1392/**
1393 * @opcode 0x15
1394 * @opgroup og_gen_arith_bin
1395 * @opflclass arithmetic_carry
1396 * @opcopytests iemOp_adc_Ev_Gv
1397 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz_RW(adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64); /* native adc_r_i emitter enabled on both hosts */
}
1403
1404
1405/**
1406 * @opcode 0x16
1407 */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT(); /* opcode 0x16 is invalid in 64-bit mode */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1414
1415
1416/**
1417 * @opcode 0x17
1418 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* opcode 0x17 is invalid in 64-bit mode */
    /* F_INHIBIT_SHADOW: POP SS blocks interrupts for the following instruction. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_SS),
                                iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1432
1433
1434/**
1435 * @opcode 0x18
1436 * @opgroup og_gen_arith_bin
1437 * @opflclass arithmetic_carry
1438 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_r8_RW(bRm, sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0); /* borrow-in handled by the sbb emitters/helpers */
}
1445
1446
1447/**
1448 * @opcode 0x19
1449 * @opgroup og_gen_arith_bin
1450 * @opflclass arithmetic_carry
1451 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_rv_RW( bRm, sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0); /* regular forms */
    IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, sbb); /* LOCK-prefixed memory form */
}
1459
1460
1461/**
1462 * @opcode 0x1a
1463 * @opgroup og_gen_arith_bin
1464 * @opflclass arithmetic_carry
1465 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_r8_rm(bRm, sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64); /* register destination, so no LOCK variant */
}
1472
1473
1474/**
1475 * @opcode 0x1b
1476 * @opgroup og_gen_arith_bin
1477 * @opflclass arithmetic_carry
1478 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 0, sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1485
1486
1487/**
1488 * @opcode 0x1c
1489 * @opgroup og_gen_arith_bin
1490 * @opflclass arithmetic_carry
1491 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64); /* native emitter on both hosts */
}
1497
1498
1499/**
1500 * @opcode 0x1d
1501 * @opgroup og_gen_arith_bin
1502 * @opflclass arithmetic_carry
1503 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz_RW(sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64); /* native emitter on both hosts */
}
1509
1510
1511/**
1512 * @opcode 0x1e
1513 * @opgroup og_stack_sreg
1514 */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* opcode 0x1e is invalid in 64-bit mode */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1521
1522
1523/**
1524 * @opcode 0x1f
1525 * @opgroup og_stack_sreg
1526 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* opcode 0x1f is invalid in 64-bit mode */
    /* Defer to the common segment-pop C implementation (rSP + all DS state may change). */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS),
                                iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1540
1541
1542/**
1543 * @opcode 0x20
1544 * @opgroup og_gen_arith_bin
1545 * @opflclass logical
1546 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined for AND */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_r8_RW(bRm, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
1554
1555
1556/**
1557 * @opcode 0x21
1558 * @opgroup og_gen_arith_bin
1559 * @opflclass logical
1560 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined for AND */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_rv_RW( bRm, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0); /* regular forms */
    IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, and); /* LOCK-prefixed memory form */
}
1569
1570
1571/**
1572 * @opcode 0x22
1573 * @opgroup og_gen_arith_bin
1574 * @opflclass logical
1575 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined for AND */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_r8_rm(bRm, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1583
1584
1585/**
1586 * @opcode 0x23
1587 * @opgroup og_gen_arith_bin
1588 * @opflclass logical
1589 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined for AND */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 0, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1597
1598
1599/**
1600 * @opcode 0x24
1601 * @opgroup og_gen_arith_bin
1602 * @opflclass logical
1603 */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    /* NOTE(review): all the other AL,Ib encodings in this file pass
       IEMOPHINT_IGNORES_OP_SIZES as the last argument; 0 here looks
       inconsistent -- confirm whether it is intentional. */
    IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined for AND */
    IEMOP_BODY_BINARY_AL_Ib(and, 0); /* 0: no native r_i emitter wired up for AND here */
}
1610
1611
1612/**
1613 * @opcode 0x25
1614 * @opgroup og_gen_arith_bin
1615 * @opflclass logical
1616 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined for AND */
    IEMOP_BODY_BINARY_rAX_Iz_RW(and, 0); /* 0: no native r_i emitter wired up for AND here */
}
1623
1624
1625/**
1626 * @opcode 0x26
1627 * @opmnemonic SEG
1628 * @op1 ES
1629 * @opgroup og_prefix
1630 * @openc prefix
1631 * @opdisenum OP_SEG
1632 * @ophints harmless
1633 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    /* Record the ES segment-override prefix and restart decoding with the next byte. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg   = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1643
1644
1645/**
1646 * @opcode 0x27
1647 * @opfltest af,cf
1648 * @opflmodify cf,pf,af,zf,sf,of
1649 * @opflundef of
1650 */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT(); /* opcode 0x27 is invalid in 64-bit mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is architecturally undefined for DAA */
    /* Defer to the C implementation; only rAX and the status flags are touched. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_daa);
}
1659
1660
1661/**
1662 * @opcode 0x28
1663 * @opgroup og_gen_arith_bin
1664 * @opflclass arithmetic
1665 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(bRm); /* Special case: sub samereg, samereg - zeros samereg and sets EFLAGS to a known value */
    IEMOP_BODY_BINARY_rm_r8_RW(bRm, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
1673
1674
1675/**
1676 * @opcode 0x29
1677 * @opgroup og_gen_arith_bin
1678 * @opflclass arithmetic
1679 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(bRm); /* Special case: sub samereg, samereg - zeros samereg and sets EFLAGS to a known value */
    IEMOP_BODY_BINARY_rm_rv_RW( bRm, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0); /* regular forms */
    IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, sub); /* LOCK-prefixed memory form */
}
1688
1689
1690/**
1691 * @opcode 0x2a
1692 * @opgroup og_gen_arith_bin
1693 * @opflclass arithmetic
1694 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(bRm); /* Special case: sub samereg, samereg - zeros samereg and sets EFLAGS to a known value */
    IEMOP_BODY_BINARY_r8_rm(bRm, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1702
1703
1704/**
1705 * @opcode 0x2b
1706 * @opgroup og_gen_arith_bin
1707 * @opflclass arithmetic
1708 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(bRm); /* Special case: sub samereg, samereg - zeros samereg and sets EFLAGS to a known value */
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 0, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1716
1717
1718/**
1719 * @opcode 0x2c
1720 * @opgroup og_gen_arith_bin
1721 * @opflclass arithmetic
1722 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64); /* native emitter on both hosts */
}
1728
1729
1730/**
1731 * @opcode 0x2d
1732 * @opgroup og_gen_arith_bin
1733 * @opflclass arithmetic
1734 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* Fixed rAX destination with Iz immediate (operand-size dependent). */
    IEMOP_BODY_BINARY_rAX_Iz_RW(sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1740
1741
1742/**
1743 * @opcode 0x2e
1744 * @opmnemonic SEG
1745 * @op1 CS
1746 * @opgroup og_prefix
1747 * @openc prefix
1748 * @opdisenum OP_SEG
1749 * @ophints harmless
1750 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS; /* effective segment for subsequent memory operands */

    /* Decode and dispatch the instruction that follows the prefix byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1760
1761
1762/**
1763 * @opcode 0x2f
1764 * @opfltest af,cf
1765 * @opflmodify cf,pf,af,zf,sf,of
1766 * @opflundef of
1767 */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    /* Deferred to the C implementation; declares rAX as modified and status flags as updated. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_das);
}
1776
1777
1778/**
1779 * @opcode 0x30
1780 * @opgroup og_gen_arith_bin
1781 * @opflclass logical
1782 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined for logical ops */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(bRm); /* Special case: xor samereg, samereg - zeros samereg and sets EFLAGS to known value */
    IEMOP_BODY_BINARY_rm_r8_RW(bRm, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
1791
1792
1793/**
1794 * @opcode 0x31
1795 * @opgroup og_gen_arith_bin
1796 * @opflclass logical
1797 */
1798FNIEMOP_DEF(iemOp_xor_Ev_Gv)
1799{
1800 IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
1801 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1802 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1803 IEMOP_BODY_BINARY_rm_rv_RW( bRm, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
1804 IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(bRm); /* Special case: xor samereg, samereg - zeros samereg and sets EFLAGS to know value */
1805 IEMOP_BODY_BINARY_rm_rv_LOCKED( bRm, xor);
1806}
1807
1808
1809/**
1810 * @opcode 0x32
1811 * @opgroup og_gen_arith_bin
1812 * @opflclass logical
1813 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined for logical ops */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(bRm); /* Special case: xor samereg, samereg - zeros samereg and sets EFLAGS to known value */
    IEMOP_BODY_BINARY_r8_rm(bRm, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1822
1823
1824/**
1825 * @opcode 0x33
1826 * @opgroup og_gen_arith_bin
1827 * @opflclass logical
1828 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined for logical ops */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(bRm); /* Special case: xor samereg, samereg - zeros samereg and sets EFLAGS to known value */
    /* This body variant takes the 16/32/64-bit assembly helpers explicitly. */
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 0, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1837
1838
1839/**
1840 * @opcode 0x34
1841 * @opgroup og_gen_arith_bin
1842 * @opflclass logical
1843 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined for logical ops */
    /* NOTE(review): arch-val argument is 0 here, while sub/cmp AL,Ib pass
       RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64 - confirm this is intentional
       (i.e. no native emitter variant for xor AL,Ib yet). */
    IEMOP_BODY_BINARY_AL_Ib(xor, 0);
}
1850
1851
1852/**
1853 * @opcode 0x35
1854 * @opgroup og_gen_arith_bin
1855 * @opflclass logical
1856 */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined for logical ops */
    /* NOTE(review): arch-val argument is 0 here, unlike sub/cmp rAX,Iz - confirm intentional. */
    IEMOP_BODY_BINARY_rAX_Iz_RW(xor, 0);
}
1863
1864
1865/**
1866 * @opcode 0x36
1867 * @opmnemonic SEG
1868 * @op1 SS
1869 * @opgroup og_prefix
1870 * @openc prefix
1871 * @opdisenum OP_SEG
1872 * @ophints harmless
1873 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS; /* effective segment for subsequent memory operands */

    /* Decode and dispatch the instruction that follows the prefix byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1883
1884
1885/**
1886 * @opcode 0x37
1887 * @opfltest af
1888 * @opflmodify cf,pf,af,zf,sf,of
1889 * @opflundef pf,zf,sf,of
1890 * @opgroup og_gen_arith_dec
1891 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1892 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1893 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1894 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1895 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1896 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1897 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1898 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1899 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1900 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1901 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1902 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1903 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1904 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1905 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1906 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1907 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1908 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1909 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1910 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1911 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1912 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1913 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1914 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1915 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1916 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1917 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1918 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1919 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1920 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1921 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1922 */
FNIEMOP_DEF(iemOp_aaa)
{
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    /* Deferred to the C implementation; declares rAX as modified and status flags as updated. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aaa);
}
1932
1933
1934/**
1935 * @opcode 0x38
1936 * @opflclass arithmetic
1937 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* CMP never writes the destination, hence the read-only (RO) body and no LOCK variant. */
    IEMOP_BODY_BINARY_rm_r8_RO(bRm, iemAImpl_cmp_u8, cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1944
1945
1946/**
1947 * @opcode 0x39
1948 * @opflclass arithmetic
1949 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Read-only body: CMP only updates flags, never the destination. */
    IEMOP_BODY_BINARY_rm_rv_RO(bRm, cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1956
1957
1958/**
1959 * @opcode 0x3a
1960 * @opflclass arithmetic
1961 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Read-only body: CMP only updates flags, never the destination. */
    IEMOP_BODY_BINARY_r8_rm_RO(bRm, cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1968
1969
1970/**
1971 * @opcode 0x3b
1972 * @opflclass arithmetic
1973 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Read-only body: CMP only updates flags, never the destination. */
    IEMOP_BODY_BINARY_rv_rm_RO(bRm, cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1980
1981
1982/**
1983 * @opcode 0x3c
1984 * @opflclass arithmetic
1985 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    /* Fixed AL operand with immediate byte; the body fetches the immediate itself. */
    IEMOP_BODY_BINARY_AL_Ib(cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1991
1992
1993/**
1994 * @opcode 0x3d
1995 * @opflclass arithmetic
1996 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    /* Read-only body: CMP only updates flags, never rAX. */
    IEMOP_BODY_BINARY_rAX_Iz_RO(cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
2002
2003
2004/**
2005 * @opcode 0x3e
2006 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS; /* effective segment for subsequent memory operands */

    /* Decode and dispatch the instruction that follows the prefix byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2016
2017
2018/**
2019 * @opcode 0x3f
2020 * @opfltest af
2021 * @opflmodify cf,pf,af,zf,sf,of
2022 * @opflundef pf,zf,sf,of
2023 * @opgroup og_gen_arith_dec
2024 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
2025 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
2026 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
2027 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
2028 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
2029 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
2030 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
2031 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
2032 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
2033 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
2034 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
2035 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
2036 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
2037 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
2038 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
2039 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
2040 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
2041 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
2042 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
2043 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
2044 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
2045 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
2046 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
2047 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
2048 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
2049 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
2050 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
2051 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
2052 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
2053 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
2054 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
2055 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
2056 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
2057 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
2058 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
2059 */
2060FNIEMOP_DEF(iemOp_aas)
2061{
2062 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
2063 IEMOP_HLP_NO_64BIT();
2064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2065 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
2066
2067 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aas);
2068}
2069
2070
2071/**
2072 * Common 'inc/dec register' helper.
2073 *
2074 * Not for 64-bit code, only for what became the rex prefixes.
2075 */
#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
            IEM_MC_ARG(uint32_t *, pEFlags, 1); \
            IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            /* The assembly helper updates the register and EFLAGS in place. */ \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
            IEM_MC_ARG(uint32_t *, pEFlags, 1); \
            IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
            /* Clear the high half of the underlying 64-bit register. */ \
            IEM_MC_CLEAR_HIGH_GREG_U64(a_iReg); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
2106
2107/**
2108 * @opcode 0x40
2109 * @opflclass incdec
2110 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        /* Decode and dispatch the instruction that follows the prefix byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
2128
2129
2130/**
2131 * @opcode 0x41
2132 * @opflclass incdec
2133 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3; /* OR'ed into register indexes by the decoder (adds 8). */

        /* Decode and dispatch the instruction that follows the prefix byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
2152
2153
2154/**
2155 * @opcode 0x42
2156 * @opflclass incdec
2157 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* OR'ed into the SIB index by the decoder (adds 8). */

        /* Decode and dispatch the instruction that follows the prefix byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
2176
2177
2178
2179/**
2180 * @opcode 0x43
2181 * @opflclass incdec
2182 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3; /* OR'ed into register indexes by the decoder (adds 8). */
        pVCpu->iem.s.uRexIndex = 1 << 3;

        /* Decode and dispatch the instruction that follows the prefix byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
2202
2203
2204/**
2205 * @opcode 0x44
2206 * @opflclass incdec
2207 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3; /* OR'ed into the ModRM reg field by the decoder (adds 8). */

        /* Decode and dispatch the instruction that follows the prefix byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
2226
2227
2228/**
2229 * @opcode 0x45
2230 * @opflclass incdec
2231 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3; /* OR'ed into register index fields by the decoder (adds 8). */
        pVCpu->iem.s.uRexB   = 1 << 3;

        /* Decode and dispatch the instruction that follows the prefix byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
2251
2252
2253/**
2254 * @opcode 0x46
2255 * @opflclass incdec
2256 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* OR'ed into register index fields by the decoder (adds 8). */
        pVCpu->iem.s.uRexIndex = 1 << 3;

        /* Decode and dispatch the instruction that follows the prefix byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
2276
2277
2278/**
2279 * @opcode 0x47
2280 * @opflclass incdec
2281 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* OR'ed into register index fields by the decoder (adds 8). */
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        /* Decode and dispatch the instruction that follows the prefix byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
2302
2303
2304/**
2305 * @opcode 0x48
2306 * @opflclass incdec
2307 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu); /* REX.W was just seen; recompute the effective operand size. */

        /* Decode and dispatch the instruction that follows the prefix byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
}
2326
2327
2328/**
2329 * @opcode 0x49
2330 * @opflclass incdec
2331 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3; /* OR'ed into register indexes by the decoder (adds 8). */
        iemRecalEffOpSize(pVCpu);    /* REX.W was just seen; recompute the effective operand size. */

        /* Decode and dispatch the instruction that follows the prefix byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
}
2351
2352
2353/**
2354 * @opcode 0x4a
2355 * @opflclass incdec
2356 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* OR'ed into the SIB index by the decoder (adds 8). */
        iemRecalEffOpSize(pVCpu);        /* REX.W was just seen; recompute the effective operand size. */

        /* Decode and dispatch the instruction that follows the prefix byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
}
2376
2377
2378/**
2379 * @opcode 0x4b
2380 * @opflclass incdec
2381 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3; /* OR'ed into register index fields by the decoder (adds 8). */
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);        /* REX.W was just seen; recompute the effective operand size. */

        /* Decode and dispatch the instruction that follows the prefix byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
}
2402
2403
2404/**
2405 * @opcode 0x4c
2406 * @opflclass incdec
2407 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3; /* OR'ed into the ModRM reg field by the decoder (adds 8). */
        iemRecalEffOpSize(pVCpu);      /* REX.W was just seen; recompute the effective operand size. */

        /* Decode and dispatch the instruction that follows the prefix byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
}
2427
2428
2429/**
2430 * @opcode 0x4d
2431 * @opflclass incdec
2432 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3; /* OR'ed into register index fields by the decoder (adds 8). */
        pVCpu->iem.s.uRexB   = 1 << 3;
        iemRecalEffOpSize(pVCpu);      /* REX.W was just seen; recompute the effective operand size. */

        /* Decode and dispatch the instruction that follows the prefix byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
}
2453
2454
2455/**
2456 * @opcode 0x4e
2457 * @opflclass incdec
2458 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* OR'ed into register index fields by the decoder (adds 8). */
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);        /* REX.W was just seen; recompute the effective operand size. */

        /* Decode and dispatch the instruction that follows the prefix byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
}
2479
2480
2481/**
2482 * @opcode 0x4f
2483 * @opflclass incdec
2484 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* OR'ed into register index fields by the decoder (adds 8). */
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);        /* REX.W was just seen; recompute the effective operand size. */

        /* Decode and dispatch the instruction that follows the prefix byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
}
2506
2507
2508/**
2509 * Common 'push register' helper.
2510 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB; /* REX.B extends the register index (r8-r15). */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        /* 64-bit mode pushes default to 64-bit; 66h selects 16-bit (no 32-bit form). */
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            /* 32-bit pushes cannot occur in 64-bit mode (see operand size fixup above). */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2555
2556
2557/**
2558 * @opcode 0x50
2559 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    /* The shared helper handles operand size and REX.B in 64-bit mode. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
2565
2566
2567/**
2568 * @opcode 0x51
2569 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    /* The shared helper handles operand size and REX.B in 64-bit mode. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
2575
2576
2577/**
2578 * @opcode 0x52
2579 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    /* The shared helper handles operand size and REX.B in 64-bit mode. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
2585
2586
2587/**
2588 * @opcode 0x53
2589 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    /* The shared helper handles operand size and REX.B in 64-bit mode. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
2595
2596
2597/**
2598 * @opcode 0x54
2599 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) != IEMTARGETCPU_8086)
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);

    /* 8086 works differently wrt to 'push sp' compared to 80186 and later. */
    /* It pushes the value SP has after the implicit 2-byte decrement. */
    IEM_MC_BEGIN(0, 1, IEM_MC_F_ONLY_8086, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Value);
    IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
    IEM_MC_SUB_LOCAL_U16(u16Value, 2);
    IEM_MC_PUSH_U16(u16Value);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
2616
2617
2618/**
2619 * @opcode 0x55
2620 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    /* The shared helper handles operand size and REX.B in 64-bit mode. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
2626
2627
2628/**
2629 * @opcode 0x56
2630 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    /* The shared helper handles operand size and REX.B in 64-bit mode. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
2636
2637
2638/**
2639 * @opcode 0x57
2640 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    /* The shared helper handles operand size and REX.B in 64-bit mode. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
2646
2647
2648/**
2649 * Common 'pop register' helper.
2650 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB; /* REX.B extends the register index (r8-r15). */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        /* 64-bit mode pops default to 64-bit; 66h selects 16-bit (no 32-bit form). */
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U16(iReg); /* pops from the stack straight into the register */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            /* 32-bit pops cannot occur in 64-bit mode (see operand size fixup above). */
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U32(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U64(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2689
2690
2691/**
2692 * @opcode 0x58
2693 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    /* The shared helper handles operand size and REX.B in 64-bit mode. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
2699
2700
2701/**
2702 * @opcode 0x59
2703 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    /* The shared helper handles operand size and REX.B in 64-bit mode. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
2709
2710
2711/**
2712 * @opcode 0x5a
2713 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    /* The shared helper handles operand size and REX.B in 64-bit mode. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
2719
2720
2721/**
2722 * @opcode 0x5b
2723 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    /* The shared helper handles operand size and REX.B in 64-bit mode. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
2729
2730
2731/**
2732 * @opcode 0x5c
2733 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    /* The shared helper handles operand size and REX.B in 64-bit mode.
       NOTE(review): 'pop [er]sp' loads the popped value into the stack pointer;
       assumed IEM_MC_POP_GREG_* handles that ordering - confirm in the MC impl. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
}
2739
2740
2741/**
2742 * @opcode 0x5d
2743 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    /* The shared helper handles operand size and REX.B in 64-bit mode. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
2749
2750
2751/**
2752 * @opcode 0x5e
2753 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    /* The shared helper handles operand size and REX.B in 64-bit mode. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
2759
2760
2761/**
2762 * @opcode 0x5f
2763 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    /* The shared helper handles operand size and REX.B in 64-bit mode. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
2769
2770
2771/**
2772 * @opcode 0x60
2773 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();  /* PUSHA requires an 80186 or later */
    IEMOP_HLP_NO_64BIT(); /* invalid in 64-bit mode */
    /* Deferred to the C implementation; only xSP is declared as modified. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_32);
}
2784
2785
2786/**
2787 * @opcode 0x61
2788 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();  /* POPA requires an 80186 or later */
        IEMOP_HLP_NO_64BIT();
        /* Deferred to the C implementation; all eight GPRs are declared as modified. */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                        iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                    iemCImpl_popa_32);
    }
    /* In 64-bit mode 0x61 would be the MVEX prefix, which is not supported -> #UD. */
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
2823
2824
2825/**
2826 * @opcode 0x62
2827 * @opmnemonic bound
2828 * @op1 Gv_RO
2829 * @op2 Ma
2830 * @opmincpu 80186
2831 * @ophints harmless x86_invalid_64
2832 * @optest op1=0 op2=0 ->
2833 * @optest op1=1 op2=0 -> value.xcpt=5
2834 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2835 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2836 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2837 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2838 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2839 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2840 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2841 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2842 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2843 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2844 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2845 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2846 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2847 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2848 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2849 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2850 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2851 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2852 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2853 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2854 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2855 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2856 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2857 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2858 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2859 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2860 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2861 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2862 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2863 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2864 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2865 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2866 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2867 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2868 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2869 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2870 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2871 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2872 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2873 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2874 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2875 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2876 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /* MOD != 3: this really is BOUND.  The memory operand holds the
               lower and upper bound back to back; both are fetched and the
               range check itself is done by the C implementation. */
            /** @todo testcase: check that there are two memory accesses involved. Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint16_t,    u16Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t,    u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t,    u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                /* Upper bound lives 2 bytes past the lower bound. */
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint32_t,    u32Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t,    u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t,    u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                /* Upper bound lives 4 bytes past the lower bound. */
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        /* MOD == 3 outside 64-bit mode: EVEX prefix candidate; #UD if the
           guest CPU lacks AVX-512 foundation support. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /* 64-bit mode: 0x62 is only valid as an EVEX prefix. */
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX decoding: consume the remaining two payload bytes, then bail as
       the prefix is not implemented yet. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2964
2965
2966/**
2967 * @opcode 0x63
2968 * @opflmodify zf
2969 * @note non-64-bit modes.
2970 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();                /* ARPL requires a 286 or later. */
    IEMOP_HLP_NO_REAL_OR_V86_MODE();    /* Protected mode only (#UD otherwise). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: reference it directly and let the assembly
           worker adjust the RPL bits and ZF in place. */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
        IEM_MC_ARG(uint16_t,        u16Src,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map it read-write, run the worker on the
           mapped word, then commit the memory and the EFLAGS copy. */
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst,                    0);
        IEM_MC_ARG(uint16_t,   u16Src,                     1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,           2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,  bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3018
3019
3020/**
3021 * @opcode 0x63
3022 *
3023 * @note This is a weird one. It works like a regular move instruction if
3024 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
3025 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register: sign-extend the 32-bit source GPR into
             * the 64-bit destination GPR.
             */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory: fetch the dword and
             * sign-extend it on the way in.
             */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        /* The !REX.W case (plain 32-bit move per AMD docs, see @todo above)
           is not implemented yet. */
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
3067
3068
3069/**
3070 * @opcode 0x64
3071 * @opmnemonic segfs
3072 * @opmincpu 80386
3073 * @opgroup og_prefixes
3074 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386(); /* FS only exists on 386+. */

    /* Record the prefix and make FS the effective segment, then decode the
       next opcode byte through the regular one-byte dispatch table. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
3086
3087
3088/**
3089 * @opcode 0x65
3090 * @opmnemonic seggs
3091 * @opmincpu 80386
3092 * @opgroup og_prefixes
3093 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386(); /* GS only exists on 386+. */

    /* Record the prefix and make GS the effective segment, then decode the
       next opcode byte through the regular one-byte dispatch table. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
3105
3106
3107/**
3108 * @opcode 0x66
3109 * @opmnemonic opsize
3110 * @openc prefix
3111 * @opmincpu 80386
3112 * @ophints harmless
3113 * @opgroup og_prefixes
3114 */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    /* Record the prefix and recompute the effective operand size. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    /* Continue decoding the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
3131
3132
3133/**
3134 * @opcode 0x67
3135 * @opmnemonic addrsize
3136 * @openc prefix
3137 * @opmincpu 80386
3138 * @ophints harmless
3139 * @opgroup og_prefixes
3140 */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    /* Record the prefix and toggle the effective address size: 16 <-> 32 in
       legacy modes, 64 -> 32 in long mode (64-bit cannot shrink to 16). */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    /* Continue decoding the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
3158
3159
3160/**
3161 * @opcode 0x68
3162 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();                /* PUSH imm first appeared on the 80186. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Stack ops default to 64-bit operand size in long mode. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint16_t, u16Value, u16Imm);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint32_t, u32Value, u32Imm);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            /* Iz is at most 32 bits wide; sign-extend it to 64 bits. */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint64_t, u64Value, u64Imm);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3203
3204
3205/**
3206 * @opcode 0x69
3207 * @opflclass multiply
3208 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* SF/ZF/AF/PF are architecturally undefined after IMUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    /* One case per effective operand size; within each, register vs memory
       source.  The result goes into a local first and is stored to the
       destination GPR after the two-operand IMUL worker has run.
       IEMTARGETCPU_EFL_BEHAVIOR_SELECT picks the worker variant matching the
       target CPU's undefined-eflags behavior. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_ARG_LOCAL_REF(uint16_t *,  pu16Dst, u16Tmp,    0);
                IEM_MC_ARG_CONST(uint16_t,        u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *,            pEFlags,            2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand - the effective address is calculated before
                   the immediate; the '2' tells the calculation there are two
                   more opcode bytes (the imm16) after the ModR/M bytes. */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint16_t *,  pu16Dst, u16Tmp,    0);
                IEM_MC_ARG_CONST(uint16_t,        u16Src,  u16Imm,    1);
                IEM_MC_ARG(uint32_t *,            pEFlags,            2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint32_t *,  pu32Dst, u32Tmp,    0);
                IEM_MC_ARG_CONST(uint32_t,        u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *,            pEFlags,            2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand - imm32 follows the ModR/M bytes, hence
                   the '4' byte hint for the effective address calculation. */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint32_t *,  pu32Dst, u32Tmp,    0);
                IEM_MC_ARG_CONST(uint32_t,        u32Src,  u32Imm,    1);
                IEM_MC_ARG(uint32_t *,            pEFlags,            2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand - Iz is imm32, sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint64_t *,  pu64Dst, u64Tmp,    0);
                IEM_MC_ARG_CONST(uint64_t,        u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *,            pEFlags,            2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);  /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();           /* parameter count for the threaded function for this block. */

                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint64_t *,  pu64Dst, u64Tmp,    0);
                /* Sign-extension of the imm32 is done here instead. */
                IEM_MC_ARG_CONST(uint64_t,        u64Src, /*=*/ (int64_t)(int32_t)u32Imm, 1);
                IEM_MC_ARG(uint32_t *,            pEFlags,            2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3364
3365
3366/**
3367 * @opcode 0x6a
3368 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();                /* PUSH imm first appeared on the 80186. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Stack ops default to 64-bit operand size in long mode. */

    /* The signed byte immediate is sign-extended to the effective operand
       size before being pushed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_186, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint16_t, uValue, (int16_t)i8Imm);
            IEM_MC_PUSH_U16(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint32_t, uValue, (int32_t)i8Imm);
            IEM_MC_PUSH_U32(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint64_t, uValue, (int64_t)i8Imm);
            IEM_MC_PUSH_U64(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3405
3406
3407/**
3408 * @opcode 0x6b
3409 * @opflclass multiply
3410 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* SF/ZF/AF/PF are architecturally undefined after IMUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    /* Same structure as the Iz variant above, but the immediate is a single
       signed byte that gets sign-extended to the effective operand size. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint16_t *,  pu16Dst, u16Tmp,            0);
                IEM_MC_ARG_CONST(uint16_t,        u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,            pEFlags,                    2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand - one imm byte follows ModR/M, hence the
                   '1' byte hint for the effective address calculation. */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);

                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint16_t *,  pu16Dst, u16Tmp,    0);
                IEM_MC_ARG_CONST(uint16_t,        u16Src,  u16Imm,    1);
                IEM_MC_ARG(uint32_t *,            pEFlags,            2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint32_t *,  pu32Dst, u32Tmp,            0);
                IEM_MC_ARG_CONST(uint32_t,        u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,            pEFlags,                    2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint32_t *,  pu32Dst, u32Tmp,    0);
                IEM_MC_ARG_CONST(uint32_t,        u32Src,  u32Imm,    1);
                IEM_MC_ARG(uint32_t *,            pEFlags,            2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint64_t *,  pu64Dst, u64Tmp,                     0);
                IEM_MC_ARG_CONST(uint64_t,        u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,            pEFlags,                             2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);

                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the threaded parameter count. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint64_t *,  pu64Dst, u64Tmp,                      0);
                IEM_MC_ARG_CONST(uint64_t,        u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,            pEFlags,                              2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3569
3570
3571/**
3572 * @opcode 0x6c
3573 * @opfltest iopl,df
3574 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186(); /* INS first appeared on the 80186. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Everything is deferred to C implementations selected by REP prefix and
       effective address size.  The REP variants additionally clobber xCX;
       all variants clobber xDI.  NOTE(review): the trailing 'false' argument
       is presumably an "I/O permission already checked" flag -- confirm
       against the iemCImpl_*ins* implementations. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3623
3624
3625/**
3626 * @opcode 0x6d
3627 * @opfltest iopl,df
3628 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186(); /* INS first appeared on the 80186. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Word/dword INS: dispatch on REP prefix, effective operand size and
       effective address size.  The 64-bit operand size case shares the op32
       implementations (see the fall-through cases below).  All variants
       clobber xDI; the REP ones also clobber xCX. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the op32 workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the op32 workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3730
3731
3732/**
3733 * @opcode 0x6e
3734 * @opfltest iopl,df
3735 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    /*
     * OUTSB - write the byte at [segment]:xSI to the I/O port in DX.
     * Decoding only happens here; the actual work (and any VM-exit / IOPL
     * checking) is deferred to a C implementation selected by the effective
     * address size.  Each IEM_MC_DEFER_TO_CIMPL_2_RET invocation returns,
     * so the switch cases need no break.
     */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        /* REP/REPNZ prefixed form: the repeated C impl also steps xCX,
           so both xSI and xCX are listed as modified guest registers. */
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Plain (non-repeated) form: only xSI is advanced. */
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3784
3785
3786/**
3787 * @opcode 0x6f
3788 * @opfltest iopl,df
3789 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    /*
     * OUTSW/OUTSD - write the word/dword at [segment]:xSI to the port in DX.
     * Dispatches on effective operand size, then on effective address size,
     * deferring to the matching C implementation.  A 64-bit operand size
     * falls into the 32-bit case below (op32 impls), as there is no 64-bit
     * port-sized variant.  Each IEM_MC_DEFER_TO_CIMPL_2_RET returns, so no
     * break statements are needed inside the switches.
     */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        /* REP/REPNZ form: the repeated impl also modifies xCX. */
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit impls. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Plain form: only xSI is advanced. */
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit impls. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3891
3892
3893/**
3894 * @opcode 0x70
3895 * @opfltest of
3896 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    /* Short relative jump, taken when EFLAGS.OF is set. */
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* rel8 displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3912
3913
3914/**
3915 * @opcode 0x71
3916 * @opfltest of
3917 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    /* Short relative jump, taken when EFLAGS.OF is clear (branches swapped vs. JO). */
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* rel8 displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3933
3934/**
3935 * @opcode 0x72
3936 * @opfltest cf
3937 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    /* Short relative jump, taken when EFLAGS.CF is set (JC/JB/JNAE). */
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* rel8 displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3953
3954
3955/**
3956 * @opcode 0x73
3957 * @opfltest cf
3958 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    /* Short relative jump, taken when EFLAGS.CF is clear (JNC/JNB/JAE). */
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* rel8 displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3974
3975
3976/**
3977 * @opcode 0x74
3978 * @opfltest zf
3979 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    /* Short relative jump, taken when EFLAGS.ZF is set (JE/JZ). */
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* rel8 displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3995
3996
3997/**
3998 * @opcode 0x75
3999 * @opfltest zf
4000 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    /* Short relative jump, taken when EFLAGS.ZF is clear (JNE/JNZ). */
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* rel8 displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4016
4017
4018/**
4019 * @opcode 0x76
4020 * @opfltest cf,zf
4021 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    /* Short relative jump, taken when CF or ZF is set (JBE/JNA). */
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* rel8 displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4037
4038
4039/**
4040 * @opcode 0x77
4041 * @opfltest cf,zf
4042 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    /* Short relative jump, taken when both CF and ZF are clear (JA/JNBE). */
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* rel8 displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4058
4059
4060/**
4061 * @opcode 0x78
4062 * @opfltest sf
4063 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    /* Short relative jump, taken when EFLAGS.SF is set. */
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* rel8 displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4079
4080
4081/**
4082 * @opcode 0x79
4083 * @opfltest sf
4084 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    /* Short relative jump, taken when EFLAGS.SF is clear. */
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* rel8 displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4100
4101
4102/**
4103 * @opcode 0x7a
4104 * @opfltest pf
4105 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    /* Short relative jump, taken when EFLAGS.PF is set. */
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* rel8 displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4121
4122
4123/**
4124 * @opcode 0x7b
4125 * @opfltest pf
4126 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    /* Short relative jump, taken when EFLAGS.PF is clear. */
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* rel8 displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4142
4143
4144/**
4145 * @opcode 0x7c
4146 * @opfltest sf,of
4147 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    /* Short relative jump, taken when SF != OF (signed less; JL/JNGE). */
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* rel8 displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4163
4164
4165/**
4166 * @opcode 0x7d
4167 * @opfltest sf,of
4168 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    /* Short relative jump, taken when SF == OF (signed greater-or-equal; JNL/JGE). */
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* rel8 displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4184
4185
4186/**
4187 * @opcode 0x7e
4188 * @opfltest zf,sf,of
4189 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    /* Short relative jump, taken when ZF is set or SF != OF (signed less-or-equal; JLE/JNG). */
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* rel8 displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4205
4206
4207/**
4208 * @opcode 0x7f
4209 * @opfltest zf,sf,of
4210 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    /* Short relative jump, taken when ZF is clear and SF == OF (signed greater; JG/JNLE). */
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* rel8 displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4226
4227
/**
 * Body for group 1 instructions (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Eb_Ib_80.
 *
 * The destination (Eb) is both read and written.  The register form can use a
 * native emitter (iemNativeEmit_<ins>_r_i_efl) on hosts included in
 * a_fRegNativeArchs; otherwise the assembly helper iemAImpl_<ins>_u8 is
 * called.  The memory form honours the LOCK prefix by mapping the destination
 * atomically and calling the _locked helper variant.
 *
 * @param   a_InsNm             Instruction name fragment used to form the
 *                              iemAImpl_ / iemNativeEmit_ function names.
 * @param   a_fRegNativeArchs   RT_ARCH_VAL_XXX mask of hosts with a native
 *                              emitter for the register form (0 = none).
 * @param   a_fMemNativeArchs   Ditto for the memory form (currently unused
 *                              by this body; kept for interface symmetry).
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_InsNm, a_fRegNativeArchs, a_fMemNativeArchs) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        IEM_MC_BEGIN(3, 2, 0, 0); \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_LOCAL(uint32_t, uEFlags); \
            IEM_MC_FETCH_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u8Dst, u8Imm, uEFlags, 8, 8); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Dst); \
            IEM_MC_COMMIT_EFLAGS(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            /* Plain (non-LOCK) read-modify-write mapping. */ \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* LOCK prefixed: atomic mapping + _locked helper. */ \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8_locked), pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
4307
/**
 * Body for group 1 instructions (binary) w/ byte imm operand where the
 * destination is only read (CMP), dispatched via iemOp_Grp1_Eb_Ib_80.
 *
 * Unlike the _RW variant, nothing is written back to the destination (only
 * EFLAGS are committed) and a LOCK prefix on the memory form raises \#UD via
 * IEMOP_RAISE_INVALID_LOCK_PREFIX_RET.
 *
 * @param   a_InsNm         Instruction name fragment used to form the
 *                          iemAImpl_ / iemNativeEmit_ function names.
 * @param   a_fNativeArchs  RT_ARCH_VAL_XXX mask of hosts with a native
 *                          emitter (used by both register and memory forms).
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_InsNm, a_fNativeArchs) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        IEM_MC_BEGIN(3, 2, 0, 0); \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_LOCAL(uint32_t, uEFlags); \
            IEM_MC_FETCH_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u8Dst, u8Imm, uEFlags, 8, 8); \
            IEM_MC_COMMIT_EFLAGS(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
            IEM_MC_REF_GREG_U8_CONST(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                /* Native path can fetch the byte directly - destination is never written. */ \
                IEM_MC_LOCAL(uint8_t, u8Dst); \
                IEM_MC_FETCH_MEM_U8(u8Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_LOCAL(uint32_t, uEFlags); \
                IEM_MC_FETCH_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u8Dst, u8Imm, uEFlags, 8, 8); \
                IEM_MC_COMMIT_EFLAGS(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
                IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
                IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* LOCK is invalid with a read-only destination. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4371
4372
4373
4374/**
4375 * @opmaps grp1_80,grp1_83
4376 * @opcode /0
4377 * @opflclass arithmetic
4378 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
{
    /* ADD Eb,Ib - native register-form emitters exist for AMD64 and ARM64 hosts. */
    IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
4384
4385
4386/**
4387 * @opmaps grp1_80,grp1_83
4388 * @opcode /1
4389 * @opflclass logical
4390 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
{
    /* OR Eb,Ib - native-arch mask is 0, so the assembly helper is always used. */
    IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(or, 0, 0);
}
4396
4397
4398/**
4399 * @opmaps grp1_80,grp1_83
4400 * @opcode /2
4401 * @opflclass arithmetic_carry
4402 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
{
    /* ADC Eb,Ib - native register-form emitters enabled for AMD64 and ARM64 hosts. */
    IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
4408
4409
4410/**
4411 * @opmaps grp1_80,grp1_83
4412 * @opcode /3
4413 * @opflclass arithmetic_carry
4414 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
{
    /* SBB Eb,Ib - native register-form emitters enabled for AMD64 and ARM64 hosts. */
    IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
4420
4421
4422/**
4423 * @opmaps grp1_80,grp1_83
4424 * @opcode /4
4425 * @opflclass logical
4426 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
{
    /* AND Eb,Ib - native-arch mask is 0, so the assembly helper is always used. */
    IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(and, 0, 0);
}
4432
4433
4434/**
4435 * @opmaps grp1_80,grp1_83
4436 * @opcode /5
4437 * @opflclass arithmetic
4438 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
{
    /* SUB Eb,Ib - native register-form emitters enabled for AMD64 and ARM64 hosts. */
    IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
4444
4445
4446/**
4447 * @opmaps grp1_80,grp1_83
4448 * @opcode /6
4449 * @opflclass logical
4450 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
{
    /* XOR Eb,Ib - native-arch mask is 0, so the assembly helper is always used. */
    IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(xor, 0, 0);
}
4456
4457
4458/**
4459 * @opmaps grp1_80,grp1_83
4460 * @opcode /7
4461 * @opflclass arithmetic
4462 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
{
    /* CMP Eb,Ib - read-only destination body (no write-back, LOCK raises #UD). */
    IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RO(cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
4468
4469
4470/**
4471 * @opcode 0x80
4472 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    /* Group 1 dispatcher: the ModR/M reg field (/0../7) selects the operation. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4489
4490
4491/**
4492 * Body for a group 1 binary operator.
4493 */
4494#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_InsNm, a_fRegNativeArchs, a_fMemNativeArchs) \
4495 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4496 { \
4497 /* register target */ \
4498 switch (pVCpu->iem.s.enmEffOpSize) \
4499 { \
4500 case IEMMODE_16BIT: \
4501 { \
4502 IEM_MC_BEGIN(3, 2, 0, 0); \
4503 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4505 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
4506 IEM_MC_LOCAL(uint16_t, u16Dst); \
4507 IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4508 IEM_MC_LOCAL(uint32_t, uEFlags); \
4509 IEM_MC_FETCH_EFLAGS(uEFlags); \
4510 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
4511 IEM_MC_COMMIT_EFLAGS(uEFlags); \
4512 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Dst); \
4513 } IEM_MC_NATIVE_ELSE() { \
4514 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4515 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4516 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4517 IEM_MC_REF_EFLAGS(pEFlags); \
4518 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4519 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
4520 } IEM_MC_NATIVE_ENDIF(); \
4521 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4522 IEM_MC_END(); \
4523 break; \
4524 } \
4525 \
4526 case IEMMODE_32BIT: \
4527 { \
4528 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0); \
4529 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4531 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
4532 IEM_MC_LOCAL(uint32_t, u32Dst); \
4533 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4534 IEM_MC_LOCAL(uint32_t, uEFlags); \
4535 IEM_MC_FETCH_EFLAGS(uEFlags); \
4536 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
4537 IEM_MC_COMMIT_EFLAGS(uEFlags); \
4538 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst); \
4539 } IEM_MC_NATIVE_ELSE() { \
4540 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4541 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4542 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4543 IEM_MC_REF_EFLAGS(pEFlags); \
4544 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4545 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
4546 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4547 } IEM_MC_NATIVE_ENDIF(); \
4548 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4549 IEM_MC_END(); \
4550 break; \
4551 } \
4552 \
4553 case IEMMODE_64BIT: \
4554 { \
4555 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0); \
4556 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4558 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
4559 IEM_MC_LOCAL(uint64_t, u64Dst); \
4560 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4561 IEM_MC_LOCAL(uint32_t, uEFlags); \
4562 IEM_MC_FETCH_EFLAGS(uEFlags); \
4563 IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
4564 IEM_MC_COMMIT_EFLAGS(uEFlags); \
4565 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst); \
4566 } IEM_MC_NATIVE_ELSE() { \
4567 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4568 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4569 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4570 IEM_MC_REF_EFLAGS(pEFlags); \
4571 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4572 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
4573 } IEM_MC_NATIVE_ENDIF(); \
4574 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4575 IEM_MC_END(); \
4576 break; \
4577 } \
4578 \
4579 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4580 } \
4581 } \
4582 else \
4583 { \
4584 /* memory target */ \
4585 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4586 { \
4587 switch (pVCpu->iem.s.enmEffOpSize) \
4588 { \
4589 case IEMMODE_16BIT: \
4590 { \
4591 IEM_MC_BEGIN(3, 3, 0, 0); \
4592 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4593 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4594 \
4595 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4596 IEMOP_HLP_DONE_DECODING(); \
4597 \
4598 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4599 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4600 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4601 \
4602 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4603 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4604 IEM_MC_FETCH_EFLAGS(EFlags); \
4605 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
4606 \
4607 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4608 IEM_MC_COMMIT_EFLAGS(EFlags); \
4609 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4610 IEM_MC_END(); \
4611 break; \
4612 } \
4613 \
4614 case IEMMODE_32BIT: \
4615 { \
4616 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4617 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4618 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4619 \
4620 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4621 IEMOP_HLP_DONE_DECODING(); \
4622 \
4623 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4624 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4625 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4626 \
4627 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4628 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4629 IEM_MC_FETCH_EFLAGS(EFlags); \
4630 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
4631 \
4632 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4633 IEM_MC_COMMIT_EFLAGS(EFlags); \
4634 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4635 IEM_MC_END(); \
4636 break; \
4637 } \
4638 \
4639 case IEMMODE_64BIT: \
4640 { \
4641 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4642 \
4643 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4644 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4645 \
4646 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4647 IEMOP_HLP_DONE_DECODING(); \
4648 \
4649 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4650 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4651 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4652 \
4653 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4654 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4655 IEM_MC_FETCH_EFLAGS(EFlags); \
4656 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
4657 \
4658 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4659 IEM_MC_COMMIT_EFLAGS(EFlags); \
4660 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4661 IEM_MC_END(); \
4662 break; \
4663 } \
4664 \
4665 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4666 } \
4667 } \
4668 else \
4669 { \
4670 switch (pVCpu->iem.s.enmEffOpSize) \
4671 { \
4672 case IEMMODE_16BIT: \
4673 { \
4674 IEM_MC_BEGIN(3, 3, 0, 0); \
4675 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4676 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4677 \
4678 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4679 IEMOP_HLP_DONE_DECODING(); \
4680 \
4681 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4682 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4683 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4684 \
4685 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4686 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4687 IEM_MC_FETCH_EFLAGS(EFlags); \
4688 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16_locked), pu16Dst, u16Src, pEFlags); \
4689 \
4690 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4691 IEM_MC_COMMIT_EFLAGS(EFlags); \
4692 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4693 IEM_MC_END(); \
4694 break; \
4695 } \
4696 \
4697 case IEMMODE_32BIT: \
4698 { \
4699 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4700 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4701 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4702 \
4703 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4704 IEMOP_HLP_DONE_DECODING(); \
4705 \
4706 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4707 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4708 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4709 \
4710 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4711 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4712 IEM_MC_FETCH_EFLAGS(EFlags); \
4713 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32_locked), pu32Dst, u32Src, pEFlags); \
4714 \
4715 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4716 IEM_MC_COMMIT_EFLAGS(EFlags); \
4717 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4718 IEM_MC_END(); \
4719 break; \
4720 } \
4721 \
4722 case IEMMODE_64BIT: \
4723 { \
4724 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4725 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4726 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4727 \
4728 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4729 IEMOP_HLP_DONE_DECODING(); \
4730 \
4731 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4732 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4733 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4734 \
4735 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4736 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4737 IEM_MC_FETCH_EFLAGS(EFlags); \
4738 IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64_locked), pu64Dst, u64Src, pEFlags); \
4739 \
4740 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4741 IEM_MC_COMMIT_EFLAGS(EFlags); \
4742 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4743 IEM_MC_END(); \
4744 break; \
4745 } \
4746 \
4747 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4748 } \
4749 } \
4750 } \
4751 (void)0
4752
/**
 * Read-only version of the group 1 Ev,Iz body, used by CMP (/7 of opcode
 * 0x81), which only updates EFLAGS and never writes the destination operand.
 *
 * @param   a_InsNm         Instruction name fragment (e.g. cmp); pasted via
 *                          RT_CONCAT3 to form the iemAImpl_<nm>_uXX worker
 *                          and iemNativeEmit_<nm>_r_i_efl emitter names.
 * @param   a_fNativeArchs  Mask of host architectures (RT_ARCH_VAL_XXX) for
 *                          which a native recompiler emitter exists; the
 *                          IEM_MC_NATIVE_IF path is taken on those hosts.
 *
 * Both register and memory targets try the native emitter first; the memory
 * operand is mapped read-only and a LOCK prefix raises \#UD.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_InsNm, a_fNativeArchs) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                IEM_MC_BEGIN(3, 2, 0, 0); \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint16_t, u16Dst); \
                    IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint16_t const *,pu16Dst, 0); \
                    IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0); \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint32_t, u32Dst); \
                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint32_t const *,pu32Dst, 0); \
                    IEM_MC_REF_GREG_U32_CONST (pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0); \
                /* imm32 is sign-extended to 64 bits for the 64-bit operand size. */ \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint64_t, u64Dst); \
                    IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint64_t const *,pu64Dst, 0); \
                    IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint16_t, u16Dst); \
                        IEM_MC_FETCH_MEM_U16(u16Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint32_t, uEFlags); \
                        IEM_MC_FETCH_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u16Dst, u16Imm, uEFlags, 16, 16); \
                        IEM_MC_COMMIT_EFLAGS(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                        IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                        IEM_MC_FETCH_EFLAGS(EFlags); \
                        IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
                        IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(EFlags); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint32_t, u32Dst); \
                        IEM_MC_FETCH_MEM_U32(u32Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint32_t, uEFlags); \
                        IEM_MC_FETCH_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u32Dst, u32Imm, uEFlags, 32, 32); \
                        IEM_MC_COMMIT_EFLAGS(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                        IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                        IEM_MC_FETCH_EFLAGS(EFlags); \
                        IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
                        IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(EFlags); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint64_t, u64Dst); \
                        IEM_MC_FETCH_MEM_U64(u64Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint32_t, uEFlags); \
                        IEM_MC_FETCH_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_5(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_i_efl), u64Dst, u64Imm, uEFlags, 64, 32); \
                        IEM_MC_COMMIT_EFLAGS(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                        IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                        IEM_MC_FETCH_EFLAGS(EFlags); \
                        IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
                        IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(EFlags); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* The destination is never written, so a LOCK prefix is invalid here. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4945
4946
/**
 * @opmaps grp1_81
 * @opcode /0
 * @opflclass arithmetic
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
    /* Native recompiler emitters (iemNativeEmit_add_r_i_efl) are enabled for
       both AMD64 and ARM64 hosts. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(add, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
4957
4958
/**
 * @opmaps grp1_81
 * @opcode /1
 * @opflclass logical
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
    /* a_fNativeArchs is zero: no native emitter yet, always uses the
       iemAImpl_or_uXX workers. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(or, 0, 0);
}
4969
4970
/**
 * @opmaps grp1_81
 * @opcode /2
 * @opflclass arithmetic_carry
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
    /* Native emitters (iemNativeEmit_adc_r_i_efl) enabled for both hosts
       as of r103744. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(adc, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
4981
4982
/**
 * @opmaps grp1_81
 * @opcode /3
 * @opflclass arithmetic_carry
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
    /* Native emitters enabled for both AMD64 and ARM64 hosts. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(sbb, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
4993
4994
/**
 * @opmaps grp1_81
 * @opcode /4
 * @opflclass logical
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
    /* a_fNativeArchs is zero: no native emitter yet, always uses the
       iemAImpl_and_uXX workers. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(and, 0, 0);
}
5005
5006
/**
 * @opmaps grp1_81
 * @opcode /5
 * @opflclass arithmetic
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
    /* Native emitters enabled for both AMD64 and ARM64 hosts. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
5017
5018
/**
 * @opmaps grp1_81
 * @opcode /6
 * @opflclass logical
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
    /* a_fNativeArchs is zero: no native emitter yet, always uses the
       iemAImpl_xor_uXX workers. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(xor, 0, 0);
}
5029
5030
/**
 * @opmaps grp1_81
 * @opcode /7
 * @opflclass arithmetic
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
    /* CMP only updates EFLAGS, hence the read-only body (no destination
       write, LOCK prefix raises \#UD). */
    IEMOP_BODY_BINARY_Ev_Iz_RO(cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
5041
5042
/**
 * @opcode 0x81
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    /*
     * Group 1 with word/dword immediate: the ModR/M reg field acts as the
     * opcode extension (/0../7) and selects the actual instruction.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5062
5063
/**
 * @opcode 0x82
 * @opmnemonic grp1_82
 * @opgroup og_groups
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    /* Opcode 0x82 is an alias of 0x80 (group 1 Eb,Ib) that is only valid
       outside 64-bit mode; reject it there, otherwise defer to the 0x80
       decoder. */
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
5074
5075
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Ev_Ib.
 *
 * The imm8 is sign-extended to the effective operand size before it is
 * passed to the worker.
 *
 * @param   a_fnNormalU16, a_fnNormalU32, a_fnNormalU64
 *          The iemAImpl_xxx_uNN workers for the non-locked cases.
 *
 * @note    This macro deliberately ends inside a still-open 'else' scope for
 *          the LOCK-prefixed memory target; it must be followed immediately
 *          by IEMOP_BODY_BINARY_Ev_Ib_LOCKED, which supplies that path and
 *          closes the scopes (split to work around an IEMAllInstPython.py
 *          parsing issue).
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); /* 32-bit GPR write zeros bits 63:32 */ \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py.
   This is the LOCK-prefixed memory-target continuation of
   IEMOP_BODY_BINARY_Ev_Ib_RW: it uses the _ATOMIC mapping plus the
   _locked workers, and closes the two scopes the _RW macro left open. */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
5300
/* Read-only variant of IEMOP_BODY_BINARY_Ev_Ib_RW, used by CMP (/7 of
   opcode 0x83): the destination is only read (const references / _RO
   mappings), the imm8 is still sign-extended to the operand size, and a
   LOCK prefix raises \#UD.  Unlike the RW variant this macro is complete
   by itself (no _LOCKED continuation). */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* The destination is never written, so a LOCK prefix is invalid here. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
5447
/**
 * @opmaps grp1_83
 * @opcode /0
 * @opflclass arithmetic
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
    /* The _RW and _LOCKED body macros are two halves of a single statement;
       the latter handles the LOCK-prefixed memory target. */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
5459
5460
/**
 * @opmaps grp1_83
 * @opcode /1
 * @opflclass logical
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
    /* The _RW and _LOCKED body macros are two halves of a single statement. */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
5472
5473
/**
 * @opmaps grp1_83
 * @opcode /2
 * @opflclass arithmetic_carry
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
    /* The _RW and _LOCKED body macros are two halves of a single statement. */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
5485
5486
/**
 * @opmaps grp1_83
 * @opcode /3
 * @opflclass arithmetic_carry
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
    /* The _RW and _LOCKED body macros are two halves of a single statement. */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
5498
5499
/**
 * @opmaps grp1_83
 * @opcode /4
 * @opflclass logical
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
    /* The _RW and _LOCKED body macros are two halves of a single statement. */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
5511
5512
/**
 * @opmaps grp1_83
 * @opcode /5
 * @opflclass arithmetic
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
    /* The _RW and _LOCKED body macros are two halves of a single statement. */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
5524
5525
/**
 * @opmaps grp1_83
 * @opcode /6
 * @opflclass logical
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
    /* The _RW and _LOCKED body macros are two halves of a single statement. */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
5537
5538
/**
 * @opmaps grp1_83
 * @opcode /7
 * @opflclass arithmetic
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
    /* CMP never writes the destination, so the read-only body suffices and
       no _LOCKED continuation is needed. */
    IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
5549
5550
/**
 * @opcode 0x83
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    /* Group 1 with sign-extended byte immediate: the ModR/M reg field acts
       as the opcode extension (/0../7) and selects the instruction. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5573
5574
/**
 * @opcode 0x84
 * @opflclass logical
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by TEST */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Deal with special case of 'test rN, rN' which is frequently used for testing for zero/non-zero registers.
     * This block only makes a differences when emitting native code, where we'll save a register fetch.
     */
    /* bRm >> 3 yields (mod << 3) | reg, so the comparison below requires both
       mod == 3 (register form) and reg == rm; the REX.R vs REX.B check makes
       sure the extended register bits match as well. */
    if (   (bRm >> X86_MODRM_REG_SHIFT) == ((bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT))
        && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB)
    {
        IEM_MC_BEGIN(3, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
            /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */
            IEM_MC_LOCAL(uint32_t, uEFlags);
            IEM_MC_FETCH_EFLAGS(uEFlags);
            /* Source and destination are the same register here, so pass u8Src twice. */
            IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u8Src, u8Src, uEFlags, 8);
            IEM_MC_COMMIT_EFLAGS(uEFlags);
        } IEM_MC_NATIVE_ELSE() {
            IEM_MC_ARG(uint8_t *, pu8Dst, 0);
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_ARG(uint32_t *, pEFlags, 2);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        } IEM_MC_NATIVE_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /* General case: shared read-only r/m8,r8 body (TEST never writes Eb). */
    IEMOP_BODY_BINARY_rm_r8_RO(bRm, iemAImpl_test_u8, test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
5616
5617
/**
 * @opcode 0x85
 * @opflclass logical
 *
 * TEST Ev,Gv - ANDs the two word/dword/qword operands and updates EFLAGS
 * without storing the result.  AF is left undefined.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Deal with special case of 'test rN, rN' which is frequently used for testing for zero/non-zero registers.
     * This block only makes a differences when emitting native code, where we'll save a register fetch.
     *
     * The condition matches a register-form ModRM byte (mod == 3) with equal
     * reg and r/m fields, and requires matching REX extension bits so both
     * operands name the same register.
     */
    if (   (bRm >> X86_MODRM_REG_SHIFT) == ((bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT))
        && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB)
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */
                    IEM_MC_LOCAL(uint32_t, uEFlags);
                    IEM_MC_FETCH_EFLAGS(uEFlags);
                    IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u16Src, u16Src, uEFlags, 16);
                    IEM_MC_COMMIT_EFLAGS(uEFlags);
                } IEM_MC_NATIVE_ELSE() {
                    /* Interpreter fallback for the 16-bit variant. */
                    IEM_MC_ARG(uint16_t *,  pu16Dst,            0);
                    IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                    IEM_MC_ARG(uint32_t *,  pEFlags,            2);
                    IEM_MC_REF_EFLAGS(pEFlags);
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                } IEM_MC_NATIVE_ENDIF();
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */
                    IEM_MC_LOCAL(uint32_t, uEFlags);
                    IEM_MC_FETCH_EFLAGS(uEFlags);
                    IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u32Src, u32Src, uEFlags, 32);
                    IEM_MC_COMMIT_EFLAGS(uEFlags);
                } IEM_MC_NATIVE_ELSE() {
                    /* Interpreter fallback for the 32-bit variant. */
                    IEM_MC_ARG(uint32_t *,  pu32Dst,            0);
                    IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                    IEM_MC_ARG(uint32_t *,  pEFlags,            2);
                    IEM_MC_REF_EFLAGS(pEFlags);
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                } IEM_MC_NATIVE_ENDIF();
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */
                    IEM_MC_LOCAL(uint32_t, uEFlags);
                    IEM_MC_FETCH_EFLAGS(uEFlags);
                    IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u64Src, u64Src, uEFlags, 64);
                    IEM_MC_COMMIT_EFLAGS(uEFlags);
                } IEM_MC_NATIVE_ELSE() {
                    /* Interpreter fallback for the 64-bit variant. */
                    IEM_MC_ARG(uint64_t *,  pu64Dst,            0);
                    IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                    IEM_MC_ARG(uint32_t *,  pEFlags,            2);
                    IEM_MC_REF_EFLAGS(pEFlags);
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                } IEM_MC_NATIVE_ENDIF();
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /* General case: shared read-only binary op body (register or memory r/m). */
    IEMOP_BODY_BINARY_rm_rv_RO(bRm, test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
5710
5711
/**
 * @opcode 0x86
 *
 * XCHG Eb,Gb - swap a byte register with r/m.  When r/m is memory, the
 * exchange uses the atomic worker unless IEM_F_X86_DISREGARD_LOCK is set,
 * in which case the plain read-write worker is used instead.
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register/register form: fetch both bytes, then store them crosswise. */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/* Common body for the memory form; a_Style selects the mapping/commit flavor
   (ATOMIC vs RW) and a_fnWorker the matching assembly/C exchange helper. */
#define IEMOP_XCHG_BYTE(a_fnWorker, a_Style) \
            IEM_MC_BEGIN(2, 4, 0, 0); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
            IEM_MC_LOCAL(uint8_t,  uTmpReg); \
            IEM_MC_ARG(uint8_t *,  pu8Mem,           0); \
            IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg,  uTmpReg, 1); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
            IEM_MC_MEM_MAP_U8_##a_Style(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker, pu8Mem, pu8Reg); \
            IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Style(bUnmapInfo); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END()

        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_locked,ATOMIC);
        }
        else
        {
            /* Lock semantics explicitly disregarded: plain read-modify-write. */
            IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_unlocked,RW);
        }
    }
}
5772
5773
/**
 * @opcode 0x87
 *
 * XCHG Ev,Gv - swap a word/dword/qword register with r/m.  When r/m is
 * memory, the atomic worker is used unless IEM_F_X86_DISREGARD_LOCK is set.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register/register form: fetch both values, then store them crosswise. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
/* Common body for the memory form; a_Type selects the mapping/commit flavor
   (ATOMIC vs RW) and a_fnWorker16/32/64 the matching exchange helpers for
   each effective operand size. */
#define IEMOP_XCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
            do { \
                switch (pVCpu->iem.s.enmEffOpSize) \
                { \
                    case IEMMODE_16BIT: \
                        IEM_MC_BEGIN(2, 4, 0, 0); \
                        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                        IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                        IEM_MC_LOCAL(uint16_t, uTmpReg); \
                        IEM_MC_ARG(uint16_t *, pu16Mem,          0); \
                        IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1); \
                        \
                        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                        IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
                        IEM_MC_MEM_MAP_U16_##a_Type(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                        IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker16, pu16Mem, pu16Reg); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                        IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
                        \
                        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                        IEM_MC_END(); \
                        break; \
                        \
                    case IEMMODE_32BIT: \
                        IEM_MC_BEGIN(2, 4, IEM_MC_F_MIN_386, 0); \
                        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                        IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                        IEM_MC_LOCAL(uint32_t, uTmpReg); \
                        IEM_MC_ARG(uint32_t *, pu32Mem,          0); \
                        IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1); \
                        \
                        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                        IEMOP_HLP_DONE_DECODING(); \
                        IEM_MC_MEM_MAP_U32_##a_Type(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                        IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker32, pu32Mem, pu32Reg); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
                        \
                        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                        IEM_MC_END(); \
                        break; \
                        \
                    case IEMMODE_64BIT: \
                        IEM_MC_BEGIN(2, 4, IEM_MC_F_64BIT, 0); \
                        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                        IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                        IEM_MC_LOCAL(uint64_t, uTmpReg); \
                        IEM_MC_ARG(uint64_t *, pu64Mem,          0); \
                        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1); \
                        \
                        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                        IEMOP_HLP_DONE_DECODING(); \
                        IEM_MC_MEM_MAP_U64_##a_Type(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                        IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker64, pu64Mem, pu64Reg); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
                        \
                        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                        IEM_MC_END(); \
                        break; \
                        \
                    IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
                } \
            } while (0)
        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_locked, iemAImpl_xchg_u32_locked, iemAImpl_xchg_u64_locked,ATOMIC);
        }
        else
        {
            /* Lock semantics explicitly disregarded: plain read-modify-write. */
            IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_unlocked, iemAImpl_xchg_u32_unlocked, iemAImpl_xchg_u64_unlocked,RW);
        }
    }
}
5919
5920
/**
 * @opcode 0x88
 *
 * MOV Eb,Gb - store a byte register into r/m (register or memory).
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register to register copy. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5960
5961
/**
 * @opcode 0x89
 *
 * MOV Ev,Gv - store a word/dword/qword register into r/m (register or
 * memory), switching on the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register to register copy. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6058
6059
/**
 * @opcode 0x8a
 *
 * MOV Gb,Eb - load a byte register from r/m (register or memory).
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register to register copy. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6098
6099
/**
 * @opcode 0x8b
 *
 * MOV Gv,Ev - load a word/dword/qword register from r/m (register or
 * memory), switching on the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register to register copy. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6196
6197
6198/**
6199 * opcode 0x63
6200 * @todo Table fixme
6201 */
6202FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
6203{
6204 if (!IEM_IS_64BIT_CODE(pVCpu))
6205 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
6206 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6207 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
6208 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
6209}
6210
6211
/**
 * @opcode 0x8c
 *
 * MOV Ev,Sw - store a segment register into r/m.  Memory stores are always
 * word sized; register stores respect the operand size with zero extension.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if (iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                /* Zero-extending fetch clears the upper destination bits. */
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                /* Zero-extending fetch clears the upper destination bits. */
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint16_t,  u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6290
6291
6292
6293
/**
 * @opcode 0x8d
 *
 * LEA Gv,M - store the effective address of the memory operand in a
 * register.  The register form is invalid (\#UD).
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /** @todo optimize: This value casting/masking can be skipped if addr-size ==
             *        operand-size, which is usually the case. It'll save an instruction
             *        and a register. */
            /* Truncate the (possibly wider) effective address to the operand size. */
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /** @todo optimize: This value casting/masking can be skipped if addr-size ==
             *        operand-size, which is usually the case. It'll save an instruction
             *        and a register. */
            /* Truncate the (possibly wider) effective address to the operand size. */
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            /* No truncation needed; the effective address is stored directly. */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6349
6350
/**
 * @opcode 0x8e
 *
 * MOV Sw,Ev - load a segment register from r/m via iemCImpl_load_SReg.
 * Loads of CS are invalid (\#UD); the access is always word sized.  The
 * CIMPL flags differ per target register and CPU mode because loading SS
 * inhibits interrupts for one instruction and mode-affecting loads must be
 * flagged for the recompiler (see the note below).
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    /** @todo r=bird: What does 8086 do here wrt CS? */
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     *
     * Note! Using IEMOP_MOV_SW_EV_REG_BODY here to specify different
     *       IEM_CIMPL_F_XXX values depending on the CPU mode and target
     *       register. This is a restriction of the current recompiler
     *       approach.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
/* Register-source body; the RT_BIT_64 mask lists the guest shadow registers
   (selector, base, limit, attributes of iSegReg) the CIMPL call clobbers. */
#define IEMOP_MOV_SW_EV_REG_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 0, 0, a_fCImplFlags); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t,      u16Value,          1); \
            IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                  RT_BIT_64(kIemNativeGstReg_SegSelFirst    + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
                                iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        if (iSegReg == X86_SREG_SS)
        {
            /* Loading SS inhibits interrupts until after the next instruction. */
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_REG_BODY(0);
        }
        else
        {
            /* ES/DS loads in 32-bit code may affect the execution mode. */
            IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_REG_BODY
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
/* Memory-source body; mirrors the register body but fetches u16Value from
   the effective address instead of a GPR. */
#define IEMOP_MOV_SW_EV_MEM_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 1, 0, a_fCImplFlags); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t,      u16Value,          1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                  RT_BIT_64(kIemNativeGstReg_SegSelFirst    + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
                                iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        if (iSegReg == X86_SREG_SS)
        {
            /* Loading SS inhibits interrupts until after the next instruction. */
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_MEM_BODY(0);
        }
        else
        {
            /* ES/DS loads in 32-bit code may affect the execution mode. */
            IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_MEM_BODY
    }
}
6466
6467
/** Opcode 0x8f /0.
 *
 * POP Ev - pop the stack top into r/m.  The memory form is special because
 * Intel documents rSP as being incremented before the effective address is
 * calculated, which conflicts with IEM's one-pass decode, so the whole
 * operation goes through a CIMPL helper. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEM_MC_ARG(RTGCPTR,        GCPtrEffDst,                           1);
            /* The (pop-size << 8) argument presumably tells the EA calc to
               account for the pre-incremented rSP - TODO confirm against
               IEM_MC_CALC_RM_EFF_ADDR's parameter docs. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t,  iEffSeg,     pVCpu->iem.s.iEffSeg,     0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEM_MC_ARG(RTGCPTR,        GCPtrEffDst,                           1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t,  iEffSeg,     pVCpu->iem.s.iEffSeg,     0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR,        GCPtrEffDst,                           1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t,  iEffSeg,     pVCpu->iem.s.iEffSeg,     0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR     GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
6599
6600
6601/**
6602 * @opcode 0x8f
6603 */
6604FNIEMOP_DEF(iemOp_Grp1A__xop)
6605{
6606 /*
6607 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
6608 * three byte VEX prefix, except that the mmmmm field cannot have the values
6609 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
6610 */
6611 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6612 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
6613 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
6614
6615 IEMOP_MNEMONIC(xop, "xop");
6616 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
6617 {
6618 /** @todo Test when exctly the XOP conformance checks kick in during
6619 * instruction decoding and fetching (using \#PF). */
6620 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
6621 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6622 if ( ( pVCpu->iem.s.fPrefixes
6623 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
6624 == 0)
6625 {
6626 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
6627 if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
6628 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
6629 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
6630 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
6631 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
6632 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
6633 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
6634 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
6635
6636 /** @todo XOP: Just use new tables and decoders. */
6637 switch (bRm & 0x1f)
6638 {
6639 case 8: /* xop opcode map 8. */
6640 IEMOP_BITCH_ABOUT_STUB();
6641 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6642
6643 case 9: /* xop opcode map 9. */
6644 IEMOP_BITCH_ABOUT_STUB();
6645 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6646
6647 case 10: /* xop opcode map 10. */
6648 IEMOP_BITCH_ABOUT_STUB();
6649 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6650
6651 default:
6652 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
6653 IEMOP_RAISE_INVALID_OPCODE_RET();
6654 }
6655 }
6656 else
6657 Log(("XOP: Invalid prefix mix!\n"));
6658 }
6659 else
6660 Log(("XOP: XOP support disabled!\n"));
6661 IEMOP_RAISE_INVALID_OPCODE_RET();
6662}
6663
6664
6665/**
6666 * Common 'xchg reg,rAX' helper.
6667 */
6668FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
6669{
6670 iReg |= pVCpu->iem.s.uRexB;
6671 switch (pVCpu->iem.s.enmEffOpSize)
6672 {
6673 case IEMMODE_16BIT:
6674 IEM_MC_BEGIN(0, 2, 0, 0);
6675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6676 IEM_MC_LOCAL(uint16_t, u16Tmp1);
6677 IEM_MC_LOCAL(uint16_t, u16Tmp2);
6678 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
6679 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
6680 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
6681 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
6682 IEM_MC_ADVANCE_RIP_AND_FINISH();
6683 IEM_MC_END();
6684 break;
6685
6686 case IEMMODE_32BIT:
6687 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
6688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6689 IEM_MC_LOCAL(uint32_t, u32Tmp1);
6690 IEM_MC_LOCAL(uint32_t, u32Tmp2);
6691 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
6692 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
6693 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
6694 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
6695 IEM_MC_ADVANCE_RIP_AND_FINISH();
6696 IEM_MC_END();
6697 break;
6698
6699 case IEMMODE_64BIT:
6700 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6702 IEM_MC_LOCAL(uint64_t, u64Tmp1);
6703 IEM_MC_LOCAL(uint64_t, u64Tmp2);
6704 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
6705 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
6706 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
6707 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
6708 IEM_MC_ADVANCE_RIP_AND_FINISH();
6709 IEM_MC_END();
6710 break;
6711
6712 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6713 }
6714}
6715
6716
6717/**
6718 * @opcode 0x90
6719 */
6720FNIEMOP_DEF(iemOp_nop)
6721{
6722 /* R8/R8D and RAX/EAX can be exchanged. */
6723 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
6724 {
6725 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
6726 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
6727 }
6728
6729 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
6730 {
6731 IEMOP_MNEMONIC(pause, "pause");
6732 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
6733 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
6734 if (!IEM_IS_IN_GUEST(pVCpu))
6735 { /* probable */ }
6736#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6737 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
6738 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmx_pause);
6739#endif
6740#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
6741 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
6742 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_svm_pause);
6743#endif
6744 }
6745 else
6746 IEMOP_MNEMONIC(nop, "nop");
6747 /** @todo testcase: lock nop; lock pause */
6748 IEM_MC_BEGIN(0, 0, 0, 0);
6749 IEMOP_HLP_DONE_DECODING();
6750 IEM_MC_ADVANCE_RIP_AND_FINISH();
6751 IEM_MC_END();
6752}
6753
6754
6755/**
6756 * @opcode 0x91
6757 */
6758FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
6759{
6760 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
6761 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
6762}
6763
6764
6765/**
6766 * @opcode 0x92
6767 */
6768FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
6769{
6770 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
6771 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
6772}
6773
6774
6775/**
6776 * @opcode 0x93
6777 */
6778FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
6779{
6780 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
6781 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
6782}
6783
6784
6785/**
6786 * @opcode 0x94
6787 */
6788FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6789{
6790 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6791 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6792}
6793
6794
6795/**
6796 * @opcode 0x95
6797 */
6798FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
6799{
6800 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
6801 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
6802}
6803
6804
6805/**
6806 * @opcode 0x96
6807 */
6808FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
6809{
6810 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
6811 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
6812}
6813
6814
6815/**
6816 * @opcode 0x97
6817 */
6818FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
6819{
6820 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
6821 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
6822}
6823
6824
6825/**
6826 * @opcode 0x98
6827 */
6828FNIEMOP_DEF(iemOp_cbw)
6829{
6830 switch (pVCpu->iem.s.enmEffOpSize)
6831 {
6832 case IEMMODE_16BIT:
6833 IEMOP_MNEMONIC(cbw, "cbw");
6834 IEM_MC_BEGIN(0, 1, 0, 0);
6835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6836 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
6837 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
6838 } IEM_MC_ELSE() {
6839 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
6840 } IEM_MC_ENDIF();
6841 IEM_MC_ADVANCE_RIP_AND_FINISH();
6842 IEM_MC_END();
6843 break;
6844
6845 case IEMMODE_32BIT:
6846 IEMOP_MNEMONIC(cwde, "cwde");
6847 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6849 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6850 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
6851 } IEM_MC_ELSE() {
6852 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
6853 } IEM_MC_ENDIF();
6854 IEM_MC_ADVANCE_RIP_AND_FINISH();
6855 IEM_MC_END();
6856 break;
6857
6858 case IEMMODE_64BIT:
6859 IEMOP_MNEMONIC(cdqe, "cdqe");
6860 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6862 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6863 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
6864 } IEM_MC_ELSE() {
6865 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
6866 } IEM_MC_ENDIF();
6867 IEM_MC_ADVANCE_RIP_AND_FINISH();
6868 IEM_MC_END();
6869 break;
6870
6871 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6872 }
6873}
6874
6875
6876/**
6877 * @opcode 0x99
6878 */
6879FNIEMOP_DEF(iemOp_cwd)
6880{
6881 switch (pVCpu->iem.s.enmEffOpSize)
6882 {
6883 case IEMMODE_16BIT:
6884 IEMOP_MNEMONIC(cwd, "cwd");
6885 IEM_MC_BEGIN(0, 1, 0, 0);
6886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6887 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6888 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
6889 } IEM_MC_ELSE() {
6890 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
6891 } IEM_MC_ENDIF();
6892 IEM_MC_ADVANCE_RIP_AND_FINISH();
6893 IEM_MC_END();
6894 break;
6895
6896 case IEMMODE_32BIT:
6897 IEMOP_MNEMONIC(cdq, "cdq");
6898 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6900 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6901 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
6902 } IEM_MC_ELSE() {
6903 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
6904 } IEM_MC_ENDIF();
6905 IEM_MC_ADVANCE_RIP_AND_FINISH();
6906 IEM_MC_END();
6907 break;
6908
6909 case IEMMODE_64BIT:
6910 IEMOP_MNEMONIC(cqo, "cqo");
6911 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6913 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
6914 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
6915 } IEM_MC_ELSE() {
6916 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
6917 } IEM_MC_ENDIF();
6918 IEM_MC_ADVANCE_RIP_AND_FINISH();
6919 IEM_MC_END();
6920 break;
6921
6922 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6923 }
6924}
6925
6926
6927/**
6928 * @opcode 0x9a
6929 */
6930FNIEMOP_DEF(iemOp_call_Ap)
6931{
6932 IEMOP_MNEMONIC(call_Ap, "call Ap");
6933 IEMOP_HLP_NO_64BIT();
6934
6935 /* Decode the far pointer address and pass it on to the far call C implementation. */
6936 uint32_t off32Seg;
6937 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
6938 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
6939 else
6940 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
6941 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
6942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6943 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
6944 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
6945 iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
6946 /** @todo make task-switches, ring-switches, ++ return non-zero status */
6947}
6948
6949
/** Opcode 0x9b. (aka fwait)
 *
 * Checks for pending x87 exceptions (and CR0.MP/TS device-not-available
 * conditions) without executing any FPU operation. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6961
6962
6963/**
6964 * @opcode 0x9c
6965 */
6966FNIEMOP_DEF(iemOp_pushf_Fv)
6967{
6968 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
6969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6970 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6971 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
6972 iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
6973}
6974
6975
6976/**
6977 * @opcode 0x9d
6978 */
6979FNIEMOP_DEF(iemOp_popf_Fv)
6980{
6981 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
6982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6983 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6984 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
6985 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
6986 iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
6987}
6988
6989
6990/**
6991 * @opcode 0x9e
6992 * @opflmodify cf,pf,af,zf,sf
6993 */
6994FNIEMOP_DEF(iemOp_sahf)
6995{
6996 IEMOP_MNEMONIC(sahf, "sahf");
6997 if ( IEM_IS_64BIT_CODE(pVCpu)
6998 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6999 IEMOP_RAISE_INVALID_OPCODE_RET();
7000 IEM_MC_BEGIN(0, 2, 0, 0);
7001 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7002 IEM_MC_LOCAL(uint32_t, u32Flags);
7003 IEM_MC_LOCAL(uint32_t, EFlags);
7004 IEM_MC_FETCH_EFLAGS(EFlags);
7005 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
7006 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7007 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
7008 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
7009 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
7010 IEM_MC_COMMIT_EFLAGS(EFlags);
7011 IEM_MC_ADVANCE_RIP_AND_FINISH();
7012 IEM_MC_END();
7013}
7014
7015
7016/**
7017 * @opcode 0x9f
7018 * @opfltest cf,pf,af,zf,sf
7019 */
7020FNIEMOP_DEF(iemOp_lahf)
7021{
7022 IEMOP_MNEMONIC(lahf, "lahf");
7023 if ( IEM_IS_64BIT_CODE(pVCpu)
7024 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
7025 IEMOP_RAISE_INVALID_OPCODE_RET();
7026 IEM_MC_BEGIN(0, 1, 0, 0);
7027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7028 IEM_MC_LOCAL(uint8_t, u8Flags);
7029 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
7030 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
7031 IEM_MC_ADVANCE_RIP_AND_FINISH();
7032 IEM_MC_END();
7033}
7034
7035
7036/**
7037 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
7038 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
7039 * Will return/throw on failures.
7040 * @param a_GCPtrMemOff The variable to store the offset in.
7041 */
7042#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
7043 do \
7044 { \
7045 switch (pVCpu->iem.s.enmEffAddrMode) \
7046 { \
7047 case IEMMODE_16BIT: \
7048 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
7049 break; \
7050 case IEMMODE_32BIT: \
7051 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
7052 break; \
7053 case IEMMODE_64BIT: \
7054 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
7055 break; \
7056 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
7057 } \
7058 } while (0)
7059
7060/**
7061 * @opcode 0xa0
7062 */
7063FNIEMOP_DEF(iemOp_mov_AL_Ob)
7064{
7065 /*
7066 * Get the offset.
7067 */
7068 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
7069 RTGCPTR GCPtrMemOffDecode;
7070 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
7071
7072 /*
7073 * Fetch AL.
7074 */
7075 IEM_MC_BEGIN(0, 2, 0, 0);
7076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7077 IEM_MC_LOCAL(uint8_t, u8Tmp);
7078 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7079 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
7080 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
7081 IEM_MC_ADVANCE_RIP_AND_FINISH();
7082 IEM_MC_END();
7083}
7084
7085
7086/**
7087 * @opcode 0xa1
7088 */
7089FNIEMOP_DEF(iemOp_mov_rAX_Ov)
7090{
7091 /*
7092 * Get the offset.
7093 */
7094 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
7095 RTGCPTR GCPtrMemOffDecode;
7096 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
7097
7098 /*
7099 * Fetch rAX.
7100 */
7101 switch (pVCpu->iem.s.enmEffOpSize)
7102 {
7103 case IEMMODE_16BIT:
7104 IEM_MC_BEGIN(0, 2, 0, 0);
7105 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7106 IEM_MC_LOCAL(uint16_t, u16Tmp);
7107 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7108 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
7109 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
7110 IEM_MC_ADVANCE_RIP_AND_FINISH();
7111 IEM_MC_END();
7112 break;
7113
7114 case IEMMODE_32BIT:
7115 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
7116 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7117 IEM_MC_LOCAL(uint32_t, u32Tmp);
7118 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7119 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
7120 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
7121 IEM_MC_ADVANCE_RIP_AND_FINISH();
7122 IEM_MC_END();
7123 break;
7124
7125 case IEMMODE_64BIT:
7126 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
7127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7128 IEM_MC_LOCAL(uint64_t, u64Tmp);
7129 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7130 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
7131 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
7132 IEM_MC_ADVANCE_RIP_AND_FINISH();
7133 IEM_MC_END();
7134 break;
7135
7136 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7137 }
7138}
7139
7140
7141/**
7142 * @opcode 0xa2
7143 */
7144FNIEMOP_DEF(iemOp_mov_Ob_AL)
7145{
7146 /*
7147 * Get the offset.
7148 */
7149 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
7150 RTGCPTR GCPtrMemOffDecode;
7151 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
7152
7153 /*
7154 * Store AL.
7155 */
7156 IEM_MC_BEGIN(0, 2, 0, 0);
7157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7158 IEM_MC_LOCAL(uint8_t, u8Tmp);
7159 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
7160 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7161 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
7162 IEM_MC_ADVANCE_RIP_AND_FINISH();
7163 IEM_MC_END();
7164}
7165
7166
7167/**
7168 * @opcode 0xa3
7169 */
7170FNIEMOP_DEF(iemOp_mov_Ov_rAX)
7171{
7172 /*
7173 * Get the offset.
7174 */
7175 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
7176 RTGCPTR GCPtrMemOffDecode;
7177 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
7178
7179 /*
7180 * Store rAX.
7181 */
7182 switch (pVCpu->iem.s.enmEffOpSize)
7183 {
7184 case IEMMODE_16BIT:
7185 IEM_MC_BEGIN(0, 2, 0, 0);
7186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7187 IEM_MC_LOCAL(uint16_t, u16Tmp);
7188 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
7189 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7190 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
7191 IEM_MC_ADVANCE_RIP_AND_FINISH();
7192 IEM_MC_END();
7193 break;
7194
7195 case IEMMODE_32BIT:
7196 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
7197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7198 IEM_MC_LOCAL(uint32_t, u32Tmp);
7199 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
7200 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7201 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
7202 IEM_MC_ADVANCE_RIP_AND_FINISH();
7203 IEM_MC_END();
7204 break;
7205
7206 case IEMMODE_64BIT:
7207 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
7208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7209 IEM_MC_LOCAL(uint64_t, u64Tmp);
7210 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
7211 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
7212 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
7213 IEM_MC_ADVANCE_RIP_AND_FINISH();
7214 IEM_MC_END();
7215 break;
7216
7217 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7218 }
7219}
7220
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Single (non-REP) MOVS step: load ValBits from iEffSeg:rSI, store to ES:rDI,
 * then advance (or, with EFLAGS.DF set, retreat) both index registers by the
 * operand size in bytes.  AddrBits selects the index register width. */
#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
7240
7241/**
7242 * @opcode 0xa4
7243 * @opfltest df
7244 */
7245FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
7246{
7247 /*
7248 * Use the C implementation if a repeat prefix is encountered.
7249 */
7250 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7251 {
7252 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
7253 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7254 switch (pVCpu->iem.s.enmEffAddrMode)
7255 {
7256 case IEMMODE_16BIT:
7257 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7258 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7259 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7260 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7261 iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
7262 case IEMMODE_32BIT:
7263 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7264 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7265 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7266 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7267 iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
7268 case IEMMODE_64BIT:
7269 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7270 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7271 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7272 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7273 iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
7274 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7275 }
7276 }
7277
7278 /*
7279 * Sharing case implementation with movs[wdq] below.
7280 */
7281 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
7282 switch (pVCpu->iem.s.enmEffAddrMode)
7283 {
7284 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7285 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7286 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
7287 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7288 }
7289}
7290
7291
7292/**
7293 * @opcode 0xa5
7294 * @opfltest df
7295 */
7296FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
7297{
7298
7299 /*
7300 * Use the C implementation if a repeat prefix is encountered.
7301 */
7302 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7303 {
7304 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
7305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7306 switch (pVCpu->iem.s.enmEffOpSize)
7307 {
7308 case IEMMODE_16BIT:
7309 switch (pVCpu->iem.s.enmEffAddrMode)
7310 {
7311 case IEMMODE_16BIT:
7312 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7313 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7314 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7315 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7316 iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
7317 case IEMMODE_32BIT:
7318 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7319 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7320 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7321 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7322 iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
7323 case IEMMODE_64BIT:
7324 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7325 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7326 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7327 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7328 iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
7329 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7330 }
7331 break;
7332 case IEMMODE_32BIT:
7333 switch (pVCpu->iem.s.enmEffAddrMode)
7334 {
7335 case IEMMODE_16BIT:
7336 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7337 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7338 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7339 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7340 iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
7341 case IEMMODE_32BIT:
7342 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7343 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7344 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7345 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7346 iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
7347 case IEMMODE_64BIT:
7348 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7349 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7350 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7351 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7352 iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
7353 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7354 }
7355 case IEMMODE_64BIT:
7356 switch (pVCpu->iem.s.enmEffAddrMode)
7357 {
7358 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
7359 case IEMMODE_32BIT:
7360 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7361 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7362 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7363 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7364 iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
7365 case IEMMODE_64BIT:
7366 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7367 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7368 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7369 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7370 iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
7371 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7372 }
7373 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7374 }
7375 }
7376
7377 /*
7378 * Annoying double switch here.
7379 * Using ugly macro for implementing the cases, sharing it with movsb.
7380 */
7381 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
7382 switch (pVCpu->iem.s.enmEffOpSize)
7383 {
7384 case IEMMODE_16BIT:
7385 switch (pVCpu->iem.s.enmEffAddrMode)
7386 {
7387 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7388 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7389 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
7390 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7391 }
7392 break;
7393
7394 case IEMMODE_32BIT:
7395 switch (pVCpu->iem.s.enmEffAddrMode)
7396 {
7397 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7398 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7399 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
7400 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7401 }
7402 break;
7403
7404 case IEMMODE_64BIT:
7405 switch (pVCpu->iem.s.enmEffAddrMode)
7406 {
7407 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7408 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
7409 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
7410 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7411 }
7412 break;
7413 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7414 }
7415}
7416
7417#undef IEM_MOVS_CASE
7418
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Single (non-REP) CMPS step: load ValBits from iEffSeg:rSI and ES:rDI, run
 * the cmp worker to set arithmetic EFLAGS (operands are not written back),
 * then advance (or, with EFLAGS.DF set, retreat) both index registers by the
 * operand size in bytes.  AddrBits selects the index register width. */
#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(3, 3, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        \
        IEM_MC_LOCAL(RTGCPTR, uAddr1); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr1, X86_GREG_xSI); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr1); \
        \
        IEM_MC_LOCAL(RTGCPTR, uAddr2); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr2, X86_GREG_xDI); \
        IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr2); \
        \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_ARG_LOCAL_REF(uint##ValBits##_t *, puValue1, uValue1, 0); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
7448
7449/**
7450 * @opcode 0xa6
7451 * @opflclass arithmetic
7452 * @opfltest df
7453 */
7454FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
7455{
7456
7457 /*
7458 * Use the C implementation if a repeat prefix is encountered.
7459 */
7460 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7461 {
7462 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
7463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7464 switch (pVCpu->iem.s.enmEffAddrMode)
7465 {
7466 case IEMMODE_16BIT:
7467 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7468 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7469 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7470 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7471 iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
7472 case IEMMODE_32BIT:
7473 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7474 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7475 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7476 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7477 iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
7478 case IEMMODE_64BIT:
7479 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7480 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7481 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7482 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7483 iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
7484 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7485 }
7486 }
7487 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7488 {
7489 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
7490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7491 switch (pVCpu->iem.s.enmEffAddrMode)
7492 {
7493 case IEMMODE_16BIT:
7494 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7495 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7496 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7497 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7498 iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
7499 case IEMMODE_32BIT:
7500 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7501 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7502 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7503 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7504 iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
7505 case IEMMODE_64BIT:
7506 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7507 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7508 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7509 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7510 iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
7511 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7512 }
7513 }
7514
7515 /*
7516 * Sharing case implementation with cmps[wdq] below.
7517 */
7518 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
7519 switch (pVCpu->iem.s.enmEffAddrMode)
7520 {
7521 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7522 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7523 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
7524 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7525 }
7526}
7527
7528
7529/**
7530 * @opcode 0xa7
7531 * @opflclass arithmetic
7532 * @opfltest df
7533 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     *
     * Each deferral passes a guest register mask naming rSI, rDI and rCX,
     * i.e. the registers a repeated CMPS modifies; presumably this tells the
     * native recompiler which shadowed guest registers the C implementation
     * may dirty (NOTE(review): confirm exact mask semantics against the
     * IEM_MC_DEFER_TO_CIMPL_1_RET definition).
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Dispatch to iemCImpl_repe_cmps_op<opsize>_addr<addrsize>. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* (no break needed: every case above returns via the *_RET macros) */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4); /* 64-bit opsize + 16-bit addressing: not encodable. */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /* Same again for REPNE/REPNZ. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* (no break needed: every case above returns via the *_RET macros) */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2); /* 64-bit opsize + 16-bit addressing: not encodable. */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7728
7729#undef IEM_CMPS_CASE
7730
7731/**
7732 * @opcode 0xa8
7733 * @opflclass logical
7734 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST (Intel SDM). */
    IEMOP_BODY_BINARY_AL_Ib(test, 0); /* AL & imm8, updating EFLAGS only (TEST does not write back). */
}
7741
7742
7743/**
7744 * @opcode 0xa9
7745 * @opflclass logical
7746 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST (Intel SDM). */
    IEMOP_BODY_BINARY_rAX_Iz_RO(test, 0); /* _RO variant: TEST only updates EFLAGS, rAX is not written. */
}
7753
7754
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits a microcode block that stores the low ValBits of rAX to ES:xDI
 * (the address register is zero-extended to 64 bits for the access), then
 * steps xDI by ValBits/8 - backwards when EFLAGS.DF is set, forwards
 * otherwise - and advances RIP. */
#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7771
7772/**
7773 * @opcode 0xaa
7774 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     * REPZ and REPNZ are treated identically for STOS; the deferral mask
     * names xDI and xCX, the registers a repeated store updates.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Dispatch on effective address size: iemCImpl_stos_al_m<addrsize>. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m16);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m32);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7817
7818
7819/**
7820 * @opcode 0xab
7821 */
7822FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
7823{
7824 /*
7825 * Use the C implementation if a repeat prefix is encountered.
7826 */
7827 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7828 {
7829 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
7830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7831 switch (pVCpu->iem.s.enmEffOpSize)
7832 {
7833 case IEMMODE_16BIT:
7834 switch (pVCpu->iem.s.enmEffAddrMode)
7835 {
7836 case IEMMODE_16BIT:
7837 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7838 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7839 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7840 iemCImpl_stos_ax_m16);
7841 case IEMMODE_32BIT:
7842 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7843 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7844 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7845 iemCImpl_stos_ax_m32);
7846 case IEMMODE_64BIT:
7847 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7848 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7849 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7850 iemCImpl_stos_ax_m64);
7851 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7852 }
7853 break;
7854 case IEMMODE_32BIT:
7855 switch (pVCpu->iem.s.enmEffAddrMode)
7856 {
7857 case IEMMODE_16BIT:
7858 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7859 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7860 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7861 iemCImpl_stos_eax_m16);
7862 case IEMMODE_32BIT:
7863 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7864 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7865 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7866 iemCImpl_stos_eax_m32);
7867 case IEMMODE_64BIT:
7868 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7869 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7870 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7871 iemCImpl_stos_eax_m64);
7872 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7873 }
7874 case IEMMODE_64BIT:
7875 switch (pVCpu->iem.s.enmEffAddrMode)
7876 {
7877 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7878 case IEMMODE_32BIT:
7879 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7880 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7881 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7882 iemCImpl_stos_rax_m32);
7883 case IEMMODE_64BIT:
7884 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7885 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7886 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7887 iemCImpl_stos_rax_m64);
7888 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7889 }
7890 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7891 }
7892 }
7893
7894 /*
7895 * Annoying double switch here.
7896 * Using ugly macro for implementing the cases, sharing it with stosb.
7897 */
7898 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7899 switch (pVCpu->iem.s.enmEffOpSize)
7900 {
7901 case IEMMODE_16BIT:
7902 switch (pVCpu->iem.s.enmEffAddrMode)
7903 {
7904 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7905 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7906 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
7907 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7908 }
7909 break;
7910
7911 case IEMMODE_32BIT:
7912 switch (pVCpu->iem.s.enmEffAddrMode)
7913 {
7914 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7915 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7916 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
7917 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7918 }
7919 break;
7920
7921 case IEMMODE_64BIT:
7922 switch (pVCpu->iem.s.enmEffAddrMode)
7923 {
7924 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7925 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
7926 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
7927 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7928 }
7929 break;
7930 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7931 }
7932}
7933
7934#undef IEM_STOS_CASE
7935
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits a microcode block that loads ValBits from [iEffSeg:xSI] into the
 * low ValBits of rAX (address register zero-extended to 64 bits), then
 * steps xSI by ValBits/8 - backwards when EFLAGS.DF is set, forwards
 * otherwise - and advances RIP. */
#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7952
7953/**
7954 * @opcode 0xac
7955 * @opfltest df
7956 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     * The deferral mask names xAX (LODS writes AL), xSI and xCX - the
     * registers a repeated load updates.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Dispatch on effective address size: iemCImpl_lods_al_m<addrsize>. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8002
8003
8004/**
8005 * @opcode 0xad
8006 * @opfltest df
8007 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     * The deferral mask names xAX (LODS writes ax/eax/rax), xSI and xCX -
     * the registers a repeated load updates.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Dispatch to iemCImpl_lods_<reg>_m<addrsize> on op+addr size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* (no break needed: every case above returns via the *_RET macros) */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7); /* 64-bit opsize + 16-bit addressing: not encodable. */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8127
8128#undef IEM_LODS_CASE
8129
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits a microcode block that compares the low ValBits of rAX with the
 * value at ES:xDI via iemAImpl_cmp_u<ValBits> (updates EFLAGS only; rAX is
 * passed by reference but CMP does not write it back), then steps xDI by
 * ValBits/8 according to EFLAGS.DF and advances RIP. */
#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
8152
8153/**
8154 * @opcode 0xae
8155 * @opflclass arithmetic
8156 * @opfltest df
8157 */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     * Unlike STOS/LODS, REPE and REPNE have distinct semantics for SCAS,
     * so each prefix gets its own set of C implementations.  The deferral
     * mask names xDI and xCX, the registers a repeated scan updates.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        /* NOTE(review): 'repone' in the stats identifier below looks like a
         * typo for 'repne'; the mnemonic string is correct.  Left untouched
         * since the identifier may feed generated statistics names. */
        IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8224
8225
8226/**
8227 * @opcode 0xaf
8228 * @opflclass arithmetic
8229 * @opfltest df
8230 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     * REPE and REPNE have distinct semantics for SCAS; the deferral mask
     * names xDI and xCX, the registers a repeated scan updates.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Dispatch to iemCImpl_repe_scas_<reg>_m<addrsize> on op+addr size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* (no break needed: every case above returns via the *_RET macros) */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* (no break needed: every case above returns via the *_RET macros) */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5); /* 64-bit opsize + 16-bit addressing: not encodable. */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8408
8409#undef IEM_SCAS_CASE
8410
8411/**
8412 * Common 'mov r8, imm8' helper.
8413 */
8414FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
8415{
8416 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8417 IEM_MC_BEGIN(0, 0, 0, 0);
8418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8419 IEM_MC_STORE_GREG_U8_CONST(iFixedReg, u8Imm);
8420 IEM_MC_ADVANCE_RIP_AND_FINISH();
8421 IEM_MC_END();
8422}
8423
8424
8425/**
8426 * @opcode 0xb0
8427 */
8428FNIEMOP_DEF(iemOp_mov_AL_Ib)
8429{
8430 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
8431 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8432}
8433
8434
8435/**
8436 * @opcode 0xb1
8437 */
8438FNIEMOP_DEF(iemOp_CL_Ib)
8439{
8440 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
8441 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8442}
8443
8444
8445/**
8446 * @opcode 0xb2
8447 */
8448FNIEMOP_DEF(iemOp_DL_Ib)
8449{
8450 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
8451 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8452}
8453
8454
8455/**
8456 * @opcode 0xb3
8457 */
8458FNIEMOP_DEF(iemOp_BL_Ib)
8459{
8460 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
8461 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8462}
8463
8464
8465/**
8466 * @opcode 0xb4
8467 */
8468FNIEMOP_DEF(iemOp_mov_AH_Ib)
8469{
8470 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
8471 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8472}
8473
8474
8475/**
8476 * @opcode 0xb5
8477 */
8478FNIEMOP_DEF(iemOp_CH_Ib)
8479{
8480 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
8481 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8482}
8483
8484
8485/**
8486 * @opcode 0xb6
8487 */
8488FNIEMOP_DEF(iemOp_DH_Ib)
8489{
8490 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
8491 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8492}
8493
8494
8495/**
8496 * @opcode 0xb7
8497 */
8498FNIEMOP_DEF(iemOp_BH_Ib)
8499{
8500 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
8501 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8502}
8503
8504
8505/**
8506 * Common 'mov regX,immX' helper.
8507 */
8508FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
8509{
8510 switch (pVCpu->iem.s.enmEffOpSize)
8511 {
8512 case IEMMODE_16BIT:
8513 IEM_MC_BEGIN(0, 0, 0, 0);
8514 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8516 IEM_MC_STORE_GREG_U16_CONST(iFixedReg, u16Imm);
8517 IEM_MC_ADVANCE_RIP_AND_FINISH();
8518 IEM_MC_END();
8519 break;
8520
8521 case IEMMODE_32BIT:
8522 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8523 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8525 IEM_MC_STORE_GREG_U32_CONST(iFixedReg, u32Imm);
8526 IEM_MC_ADVANCE_RIP_AND_FINISH();
8527 IEM_MC_END();
8528 break;
8529
8530 case IEMMODE_64BIT:
8531 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
8532 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
8533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8534 IEM_MC_STORE_GREG_U64_CONST(iFixedReg, u64Imm);
8535 IEM_MC_ADVANCE_RIP_AND_FINISH();
8536 IEM_MC_END();
8537 break;
8538 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8539 }
8540}
8541
8542
8543/**
8544 * @opcode 0xb8
8545 */
8546FNIEMOP_DEF(iemOp_eAX_Iv)
8547{
8548 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
8549 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8550}
8551
8552
8553/**
8554 * @opcode 0xb9
8555 */
8556FNIEMOP_DEF(iemOp_eCX_Iv)
8557{
8558 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
8559 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8560}
8561
8562
8563/**
8564 * @opcode 0xba
8565 */
8566FNIEMOP_DEF(iemOp_eDX_Iv)
8567{
8568 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
8569 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8570}
8571
8572
8573/**
8574 * @opcode 0xbb
8575 */
8576FNIEMOP_DEF(iemOp_eBX_Iv)
8577{
8578 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
8579 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8580}
8581
8582
8583/**
8584 * @opcode 0xbc
8585 */
8586FNIEMOP_DEF(iemOp_eSP_Iv)
8587{
8588 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
8589 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8590}
8591
8592
8593/**
8594 * @opcode 0xbd
8595 */
8596FNIEMOP_DEF(iemOp_eBP_Iv)
8597{
8598 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
8599 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8600}
8601
8602
8603/**
8604 * @opcode 0xbe
8605 */
8606FNIEMOP_DEF(iemOp_eSI_Iv)
8607{
8608 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
8609 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8610}
8611
8612
8613/**
8614 * @opcode 0xbf
8615 */
8616FNIEMOP_DEF(iemOp_eDI_Iv)
8617{
8618 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
8619 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8620}
8621
8622
8623/**
8624 * @opcode 0xc0
8625 */
8626FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
8627{
8628 IEMOP_HLP_MIN_186();
8629 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8630
8631 /* Need to use a body macro here since the EFLAGS behaviour differs between
8632 the shifts, rotates and rotate w/ carry. Sigh. */
8633#define GRP2_BODY_Eb_Ib(a_pImplExpr) \
8634 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
8635 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8636 { \
8637 /* register */ \
8638 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8639 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0); \
8640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8641 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
8642 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8643 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8644 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8645 IEM_MC_REF_EFLAGS(pEFlags); \
8646 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
8647 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8648 IEM_MC_END(); \
8649 } \
8650 else \
8651 { \
8652 /* memory */ \
8653 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_186, 0); \
8654 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8655 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
8656 \
8657 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8658 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8659 \
8660 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8661 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
8662 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8663 \
8664 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8665 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
8666 IEM_MC_FETCH_EFLAGS(EFlags); \
8667 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
8668 \
8669 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8670 IEM_MC_COMMIT_EFLAGS(EFlags); \
8671 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8672 IEM_MC_END(); \
8673 } (void)0
8674
8675 switch (IEM_GET_MODRM_REG_8(bRm))
8676 {
8677 /**
8678 * @opdone
8679 * @opmaps grp2_c0
8680 * @opcode /0
8681 * @opflclass rotate_count
8682 */
8683 case 0:
8684 {
8685 IEMOP_MNEMONIC2(MI, ROL, rol, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8686 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8687 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
8688 break;
8689 }
8690 /**
8691 * @opdone
8692 * @opmaps grp2_c0
8693 * @opcode /1
8694 * @opflclass rotate_count
8695 */
8696 case 1:
8697 {
8698 IEMOP_MNEMONIC2(MI, ROR, ror, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8699 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8700 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
8701 break;
8702 }
8703 /**
8704 * @opdone
8705 * @opmaps grp2_c0
8706 * @opcode /2
8707 * @opflclass rotate_carry_count
8708 */
8709 case 2:
8710 {
8711 IEMOP_MNEMONIC2(MI, RCL, rcl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8712 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8713 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
8714 break;
8715 }
8716 /**
8717 * @opdone
8718 * @opmaps grp2_c0
8719 * @opcode /3
8720 * @opflclass rotate_carry_count
8721 */
8722 case 3:
8723 {
8724 IEMOP_MNEMONIC2(MI, RCR, rcr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8725 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8726 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
8727 break;
8728 }
8729 /**
8730 * @opdone
8731 * @opmaps grp2_c0
8732 * @opcode /4
8733 * @opflclass shift_count
8734 */
8735 case 4:
8736 {
8737 IEMOP_MNEMONIC2(MI, SHL, shl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8738 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8739 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
8740 break;
8741 }
8742 /**
8743 * @opdone
8744 * @opmaps grp2_c0
8745 * @opcode /5
8746 * @opflclass shift_count
8747 */
8748 case 5:
8749 {
8750 IEMOP_MNEMONIC2(MI, SHR, shr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8751 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8752 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
8753 break;
8754 }
8755 /**
8756 * @opdone
8757 * @opmaps grp2_c0
8758 * @opcode /7
8759 * @opflclass shift_count
8760 */
8761 case 7:
8762 {
8763 IEMOP_MNEMONIC2(MI, SAR, sar, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8764 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8765 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
8766 break;
8767 }
8768
8769 /** @opdone */
8770 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8771 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8772 }
8773#undef GRP2_BODY_Eb_Ib
8774}
8775
8776
/* Need to use a body macro here since the EFLAGS behaviour differs between
   the shifts, rotates and rotate w/ carry. Sigh.
   The macro decodes the imm8 shift count, fetches the Ev operand (register
   or read-write mapped memory for the current 16/32/64-bit operand size)
   and calls the matching worker from the given IEMOPSHIFTSIZES table.
   Expects 'bRm' to be in scope at the expansion site. */
#define GRP2_BODY_Ev_Ib(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,           2); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,           2); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,           2); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 3, 0, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
8907
8908/**
8909 * @opmaps grp2_c1
8910 * @opcode /0
8911 * @opflclass rotate_count
8912 */
8913FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_Ib, uint8_t, bRm)
8914{
8915 IEMOP_MNEMONIC2(MI, ROL, rol, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8916 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
8917}
8918
8919
8920/**
8921 * @opmaps grp2_c1
8922 * @opcode /1
8923 * @opflclass rotate_count
8924 */
8925FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_Ib, uint8_t, bRm)
8926{
8927 IEMOP_MNEMONIC2(MI, ROR, ror, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8928 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
8929}
8930
8931
8932/**
8933 * @opmaps grp2_c1
8934 * @opcode /2
8935 * @opflclass rotate_carry_count
8936 */
8937FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_Ib, uint8_t, bRm)
8938{
8939 IEMOP_MNEMONIC2(MI, RCL, rcl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8940 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
8941}
8942
8943
8944/**
8945 * @opmaps grp2_c1
8946 * @opcode /3
8947 * @opflclass rotate_carry_count
8948 */
8949FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_Ib, uint8_t, bRm)
8950{
8951 IEMOP_MNEMONIC2(MI, RCR, rcr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8952 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
8953}
8954
8955
8956/**
8957 * @opmaps grp2_c1
8958 * @opcode /4
8959 * @opflclass shift_count
8960 */
8961FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_Ib, uint8_t, bRm)
8962{
8963 IEMOP_MNEMONIC2(MI, SHL, shl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8964 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
8965}
8966
8967
8968/**
8969 * @opmaps grp2_c1
8970 * @opcode /5
8971 * @opflclass shift_count
8972 */
8973FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_Ib, uint8_t, bRm)
8974{
8975 IEMOP_MNEMONIC2(MI, SHR, shr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8976 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
8977}
8978
8979
8980/**
8981 * @opmaps grp2_c1
8982 * @opcode /7
8983 * @opflclass shift_count
8984 */
8985FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_Ib, uint8_t, bRm)
8986{
8987 IEMOP_MNEMONIC2(MI, SAR, sar, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8988 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
8989}
8990
8991#undef GRP2_BODY_Ev_Ib
8992
8993/**
8994 * @opcode 0xc1
8995 */
8996FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
8997{
8998 IEMOP_HLP_MIN_186();
8999 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9000
9001 switch (IEM_GET_MODRM_REG_8(bRm))
9002 {
9003 case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_Ib, bRm);
9004 case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_Ib, bRm);
9005 case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_Ib, bRm);
9006 case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_Ib, bRm);
9007 case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_Ib, bRm);
9008 case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_Ib, bRm);
9009 case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_Ib, bRm);
9010 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9011 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
9012 }
9013}
9014
9015
9016/**
9017 * @opcode 0xc2
9018 */
9019FNIEMOP_DEF(iemOp_retn_Iw)
9020{
9021 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
9022 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9023 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
9024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9025 switch (pVCpu->iem.s.enmEffOpSize)
9026 {
9027 case IEMMODE_16BIT:
9028 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
9029 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_16, u16Imm);
9030 case IEMMODE_32BIT:
9031 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
9032 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_32, u16Imm);
9033 case IEMMODE_64BIT:
9034 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
9035 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_64, u16Imm);
9036 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9037 }
9038}
9039
9040
9041/**
9042 * @opcode 0xc3
9043 */
9044FNIEMOP_DEF(iemOp_retn)
9045{
9046 IEMOP_MNEMONIC(retn, "retn");
9047 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
9048 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9049 switch (pVCpu->iem.s.enmEffOpSize)
9050 {
9051 case IEMMODE_16BIT:
9052 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
9053 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_16);
9054 case IEMMODE_32BIT:
9055 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
9056 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_32);
9057 case IEMMODE_64BIT:
9058 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
9059 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_64);
9060 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9061 }
9062}
9063
9064
9065/**
9066 * @opcode 0xc4
9067 */
9068FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
9069{
9070 /* The LDS instruction is invalid 64-bit mode. In legacy and
9071 compatability mode it is invalid with MOD=3.
9072 The use as a VEX prefix is made possible by assigning the inverted
9073 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
9074 outside of 64-bit mode. VEX is not available in real or v86 mode. */
9075 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9076 if ( IEM_IS_64BIT_CODE(pVCpu)
9077 || IEM_IS_MODRM_REG_MODE(bRm) )
9078 {
9079 IEMOP_MNEMONIC(vex3_prefix, "vex3");
9080 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
9081 {
9082 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
9083 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
9084 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
9085 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
9086 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
9087#if 1
9088 AssertCompile(IEM_OP_PRF_SIZE_REX_W == RT_BIT_32(9));
9089 pVCpu->iem.s.fPrefixes |= (uint32_t)(bVex2 & 0x80) << (9 - 7);
9090#else
9091 if (bVex2 & 0x80 /* VEX.W */)
9092 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
9093#endif
9094 if (IEM_IS_64BIT_CODE(pVCpu))
9095 {
9096#if 1
9097 AssertCompile(IEM_OP_PRF_REX_B == RT_BIT_32(25) && IEM_OP_PRF_REX_X == RT_BIT_32(26) && IEM_OP_PRF_REX_R == RT_BIT_32(27));
9098 pVCpu->iem.s.fPrefixes |= (uint32_t)(~bRm & 0xe0) << (25 - 5);
9099#else
9100 if (~bRm & 0x20 /* VEX.~B */)
9101 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_B;
9102 if (~bRm & 0x40 /* VEX.~X */)
9103 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_X;
9104 if (~bRm & 0x80 /* VEX.~R */)
9105 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_R;
9106#endif
9107 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
9108 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
9109 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
9110 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
9111 }
9112 else
9113 {
9114 pVCpu->iem.s.uRexReg = 0;
9115 pVCpu->iem.s.uRexIndex = 0;
9116 pVCpu->iem.s.uRexB = 0;
9117 /** @todo testcase: Will attemps to access registers 8 thru 15 from 16&32 bit
9118 * code raise \#UD or just be ignored? We're ignoring for now... */
9119 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0x7;
9120 }
9121 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
9122 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
9123
9124 switch (bRm & 0x1f)
9125 {
9126 case 1: /* 0x0f lead opcode byte. */
9127#ifdef IEM_WITH_VEX
9128 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
9129#else
9130 IEMOP_BITCH_ABOUT_STUB();
9131 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
9132#endif
9133
9134 case 2: /* 0x0f 0x38 lead opcode bytes. */
9135#ifdef IEM_WITH_VEX
9136 return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
9137#else
9138 IEMOP_BITCH_ABOUT_STUB();
9139 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
9140#endif
9141
9142 case 3: /* 0x0f 0x3a lead opcode bytes. */
9143#ifdef IEM_WITH_VEX
9144 return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
9145#else
9146 IEMOP_BITCH_ABOUT_STUB();
9147 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
9148#endif
9149
9150 default:
9151 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
9152 IEMOP_RAISE_INVALID_OPCODE_RET();
9153 }
9154 }
9155 Log(("VEX3: VEX support disabled!\n"));
9156 IEMOP_RAISE_INVALID_OPCODE_RET();
9157 }
9158
9159 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
9160 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
9161}
9162
9163
9164/**
9165 * @opcode 0xc5
9166 */
9167FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
9168{
9169 /* The LES instruction is invalid 64-bit mode. In legacy and
9170 compatability mode it is invalid with MOD=3.
9171 The use as a VEX prefix is made possible by assigning the inverted
9172 REX.R to the top MOD bit, and the top bit in the inverted register
9173 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
9174 to accessing registers 0..7 in this VEX form. */
9175 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9176 if ( IEM_IS_64BIT_CODE(pVCpu)
9177 || IEM_IS_MODRM_REG_MODE(bRm))
9178 {
9179 IEMOP_MNEMONIC(vex2_prefix, "vex2");
9180 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
9181 {
9182 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
9183 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
9184 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
9185 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
9186 AssertCompile(IEM_OP_PRF_REX_R == RT_BIT_32(27));
9187 pVCpu->iem.s.fPrefixes |= (uint32_t)(~bRm & 0x80) << (27 - 7);
9188 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
9189 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
9190 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
9191 pVCpu->iem.s.idxPrefix = bRm & 0x3;
9192
9193#ifdef IEM_WITH_VEX
9194 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
9195#else
9196 IEMOP_BITCH_ABOUT_STUB();
9197 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
9198#endif
9199 }
9200
9201 /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
9202 Log(("VEX2: VEX support disabled!\n"));
9203 IEMOP_RAISE_INVALID_OPCODE_RET();
9204 }
9205
9206 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
9207 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
9208}
9209
9210
9211/**
9212 * @opcode 0xc6
9213 */
9214FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
9215{
9216 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9217 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
9218 IEMOP_RAISE_INVALID_OPCODE_RET();
9219 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
9220
9221 if (IEM_IS_MODRM_REG_MODE(bRm))
9222 {
9223 /* register access */
9224 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9225 IEM_MC_BEGIN(0, 0, 0, 0);
9226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9227 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
9228 IEM_MC_ADVANCE_RIP_AND_FINISH();
9229 IEM_MC_END();
9230 }
9231 else
9232 {
9233 /* memory access. */
9234 IEM_MC_BEGIN(0, 1, 0, 0);
9235 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9236 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9237 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9239 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
9240 IEM_MC_ADVANCE_RIP_AND_FINISH();
9241 IEM_MC_END();
9242 }
9243}
9244
9245
9246/**
9247 * @opcode 0xc7
9248 */
9249FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
9250{
9251 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9252 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Iz in this group. */
9253 IEMOP_RAISE_INVALID_OPCODE_RET();
9254 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
9255
9256 if (IEM_IS_MODRM_REG_MODE(bRm))
9257 {
9258 /* register access */
9259 switch (pVCpu->iem.s.enmEffOpSize)
9260 {
9261 case IEMMODE_16BIT:
9262 IEM_MC_BEGIN(0, 0, 0, 0);
9263 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9265 IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
9266 IEM_MC_ADVANCE_RIP_AND_FINISH();
9267 IEM_MC_END();
9268 break;
9269
9270 case IEMMODE_32BIT:
9271 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
9272 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9274 IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
9275 IEM_MC_ADVANCE_RIP_AND_FINISH();
9276 IEM_MC_END();
9277 break;
9278
9279 case IEMMODE_64BIT:
9280 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
9281 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9282 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9283 IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
9284 IEM_MC_ADVANCE_RIP_AND_FINISH();
9285 IEM_MC_END();
9286 break;
9287
9288 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9289 }
9290 }
9291 else
9292 {
9293 /* memory access. */
9294 switch (pVCpu->iem.s.enmEffOpSize)
9295 {
9296 case IEMMODE_16BIT:
9297 IEM_MC_BEGIN(0, 1, 0, 0);
9298 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9299 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
9300 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9302 IEM_MC_STORE_MEM_U16_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
9303 IEM_MC_ADVANCE_RIP_AND_FINISH();
9304 IEM_MC_END();
9305 break;
9306
9307 case IEMMODE_32BIT:
9308 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
9309 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9310 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9311 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9313 IEM_MC_STORE_MEM_U32_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
9314 IEM_MC_ADVANCE_RIP_AND_FINISH();
9315 IEM_MC_END();
9316 break;
9317
9318 case IEMMODE_64BIT:
9319 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
9320 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9321 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9322 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9324 IEM_MC_STORE_MEM_U64_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
9325 IEM_MC_ADVANCE_RIP_AND_FINISH();
9326 IEM_MC_END();
9327 break;
9328
9329 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9330 }
9331 }
9332}
9333
9334
9335
9336
9337/**
9338 * @opcode 0xc8
9339 */
9340FNIEMOP_DEF(iemOp_enter_Iw_Ib)
9341{
9342 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
9343 IEMOP_HLP_MIN_186();
9344 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9345 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
9346 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
9347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9348 IEM_MC_DEFER_TO_CIMPL_3_RET(0,
9349 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9350 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
9351 iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
9352}
9353
9354
9355/**
9356 * @opcode 0xc9
9357 */
9358FNIEMOP_DEF(iemOp_leave)
9359{
9360 IEMOP_MNEMONIC(leave, "leave");
9361 IEMOP_HLP_MIN_186();
9362 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9364 IEM_MC_DEFER_TO_CIMPL_1_RET(0,
9365 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9366 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
9367 iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
9368}
9369
9370
9371/**
9372 * @opcode 0xca
9373 */
9374FNIEMOP_DEF(iemOp_retf_Iw)
9375{
9376 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
9377 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9378 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9379 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
9380 | IEM_CIMPL_F_MODE,
9381 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9382 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
9383 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
9384 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
9385 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9386 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
9387 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
9388 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
9389 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9390 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
9391 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
9392 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
9393 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9394 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
9395 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
9396 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
9397 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9398 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
9399}
9400
9401
9402/**
9403 * @opcode 0xcb
 *
 * RETF - far return without an immediate (same as iemOp_retf_Iw with a zero
 * byte count; both defer to iemCImpl_retf with identical flags and flush
 * mask).
9404 */
9405FNIEMOP_DEF(iemOp_retf)
9406{
9407    IEMOP_MNEMONIC(retf, "retf");
9408    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9409    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
9410                                | IEM_CIMPL_F_MODE,
9411                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9412                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
9413                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
9414                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
9415                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9416                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
9417                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
9418                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
9419                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9420                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
9421                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
9422                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
9423                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9424                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
9425                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
9426                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
9427                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9428                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0 /* no bytes to pop */);
9429}
9430
9431
9432/**
9433 * @opcode 0xcc
 *
 * INT3 - raises \#BP via iemCImpl_int.
 * NOTE(review): unlike iemOp_int_Ib this passes an empty (0) register flush
 * mask but adds IEM_CIMPL_F_END_TB -- presumably ending the translation
 * block makes selective register flushing unnecessary; confirm against the
 * recompiler's flush-mask contract.
9434 */
9435FNIEMOP_DEF(iemOp_int3)
9436{
9437    IEMOP_MNEMONIC(int3, "int3");
9438    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9439    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9440                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
9441                                iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
9442}
9443
9444
9445/**
9446 * @opcode 0xcd
 *
 * INT Ib - software interrupt with the vector taken from the immediate byte.
 * Defers to iemCImpl_int with a full (UINT64_MAX) register flush mask since
 * the resulting task/ring switch can touch just about any guest register.
9447 */
9448FNIEMOP_DEF(iemOp_int_Ib)
9449{
9450    IEMOP_MNEMONIC(int_Ib, "int Ib");
9451    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
9452    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9453    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9454                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS, UINT64_MAX,
9455                                iemCImpl_int, u8Int, IEMINT_INTN);
9456    /** @todo make task-switches, ring-switches, ++ return non-zero status */
9457}
9458
9459
9460/**
9461 * @opcode 0xce
 *
 * INTO - raises \#OF (vector 4) when EFLAGS.OF is set, hence the additional
 * IEM_CIMPL_F_BRANCH_CONDITIONAL flag.  Invalid in 64-bit mode
 * (IEMOP_HLP_NO_64BIT).  Full register flush mask, like iemOp_int_Ib.
9462 */
9463FNIEMOP_DEF(iemOp_into)
9464{
9465    IEMOP_MNEMONIC(into, "into");
9466    IEMOP_HLP_NO_64BIT();
9467    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9468                                | IEM_CIMPL_F_BRANCH_CONDITIONAL | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
9469                                UINT64_MAX,
9470                                iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
9471    /** @todo make task-switches, ring-switches, ++ return non-zero status */
9472}
9473
9474
9475/**
9476 * @opcode 0xcf
 *
 * IRET - interrupt return, deferred to iemCImpl_iret.  IRQs are checked
 * before the instruction (IEM_CIMPL_F_CHECK_IRQ_BEFORE) and the flush mask
 * covers xSP plus all DS/ES/FS/GS hidden state; see the trailing comment
 * for the rationale.
9477 */
9478FNIEMOP_DEF(iemOp_iret)
9479{
9480    IEMOP_MNEMONIC(iret, "iret");
9481    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9482    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9483                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
9484                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9485                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
9486                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
9487                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
9488                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
9489                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
9490                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
9491                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
9492                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
9493                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
9494                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
9495                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
9496                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
9497                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9498                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9499                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9500                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9501                                iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
9502    /* Segment registers are sanitized when returning to an outer ring, or fully
9503       reloaded when returning to v86 mode. Thus the large flush list above. */
9504}
9505
9506
9507/**
9508 * @opcode 0xd0
 *
 * Group 2 byte shifts/rotates with a fixed count of 1 (rol/ror/rcl/rcr/
 * shl/shr/sar Eb,1), dispatched on the ModRM reg field.  The shared body
 * macro below handles both register and memory operand forms; the selected
 * EFLAGS-variant worker table supplies the actual arithmetic.  /6 is an
 * invalid encoding.
9509 */
9510FNIEMOP_DEF(iemOp_Grp2_Eb_1)
9511{
9512    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9513
9514    /* Need to use a body macro here since the EFLAGS behaviour differs between
9515       the shifts, rotates and rotate w/ carry. Sigh. */
9516#define GRP2_BODY_Eb_1(a_pImplExpr) \
9517    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
9518    if (IEM_IS_MODRM_REG_MODE(bRm)) \
9519    { \
9520        /* register */ \
9521        IEM_MC_BEGIN(3, 0, 0, 0); \
9522        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9523        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
9524        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1); \
9525        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9526        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9527        IEM_MC_REF_EFLAGS(pEFlags); \
9528        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
9529        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9530        IEM_MC_END(); \
9531    } \
9532    else \
9533    { \
9534        /* memory */ \
9535        IEM_MC_BEGIN(3, 3, 0, 0); \
9536        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
9537        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1); \
9538        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9539        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9540        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9541        \
9542        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9543        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9544        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9545        IEM_MC_FETCH_EFLAGS(EFlags); \
9546        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
9547        \
9548        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9549        IEM_MC_COMMIT_EFLAGS(EFlags); \
9550        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9551        IEM_MC_END(); \
9552    } (void)0
9553
9554    switch (IEM_GET_MODRM_REG_8(bRm))
9555    {
9556        /**
9557         * @opdone
9558         * @opmaps grp2_d0
9559         * @opcode /0
9560         * @opflclass rotate_1
9561         */
9562        case 0:
9563        {
9564            IEMOP_MNEMONIC2(M1, ROL, rol, Eb, 1, DISOPTYPE_HARMLESS, 0);
9565            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9566            break;
9567        }
9568        /**
9569         * @opdone
9570         * @opmaps grp2_d0
9571         * @opcode /1
9572         * @opflclass rotate_1
9573         */
9574        case 1:
9575        {
9576            IEMOP_MNEMONIC2(M1, ROR, ror, Eb, 1, DISOPTYPE_HARMLESS, 0);
9577            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9578            break;
9579        }
9580        /**
9581         * @opdone
9582         * @opmaps grp2_d0
9583         * @opcode /2
9584         * @opflclass rotate_carry_1
9585         */
9586        case 2:
9587        {
9588            IEMOP_MNEMONIC2(M1, RCL, rcl, Eb, 1, DISOPTYPE_HARMLESS, 0);
9589            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9590            break;
9591        }
9592        /**
9593         * @opdone
9594         * @opmaps grp2_d0
9595         * @opcode /3
9596         * @opflclass rotate_carry_1
9597         */
9598        case 3:
9599        {
9600            IEMOP_MNEMONIC2(M1, RCR, rcr, Eb, 1, DISOPTYPE_HARMLESS, 0);
9601            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9602            break;
9603        }
9604        /**
9605         * @opdone
9606         * @opmaps grp2_d0
9607         * @opcode /4
9608         * @opflclass shift_1
9609         */
9610        case 4:
9611        {
9612            IEMOP_MNEMONIC2(M1, SHL, shl, Eb, 1, DISOPTYPE_HARMLESS, 0);
9613            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9614            break;
9615        }
9616        /**
9617         * @opdone
9618         * @opmaps grp2_d0
9619         * @opcode /5
9620         * @opflclass shift_1
9621         */
9622        case 5:
9623        {
9624            IEMOP_MNEMONIC2(M1, SHR, shr, Eb, 1, DISOPTYPE_HARMLESS, 0);
9625            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9626            break;
9627        }
9628        /**
9629         * @opdone
9630         * @opmaps grp2_d0
9631         * @opcode /7
9632         * @opflclass shift_1
9633         */
9634        case 7:
9635        {
9636            IEMOP_MNEMONIC2(M1, SAR, sar, Eb, 1, DISOPTYPE_HARMLESS, 0);
9637            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
9638            break;
9639        }
9640        /** @opdone */
9641        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9642        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
9643    }
9644#undef GRP2_BODY_Eb_1
9645}
9646
9647
9648/* Need to use a body macro here since the EFLAGS behaviour differs between
9649   the shifts, rotates and rotate w/ carry. Sigh.

   Shared body for the 0xd1 "<shift> Ev,1" workers below.  Expands with
   'bRm' and 'pVCpu' expected in scope; a_pImplExpr selects the size-
   dispatched worker table (PCIEMOPSHIFTSIZES).  Handles register and
   memory operand forms for 16/32/64-bit operand sizes, with a constant
   shift count of 1.  In the memory forms the effective address is
   calculated before IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX, as required
   by the decoder. */
9650#define GRP2_BODY_Ev_1(a_pImplExpr) \
9651    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
9652    if (IEM_IS_MODRM_REG_MODE(bRm)) \
9653    { \
9654        /* register */ \
9655        switch (pVCpu->iem.s.enmEffOpSize) \
9656        { \
9657            case IEMMODE_16BIT: \
9658                IEM_MC_BEGIN(3, 0, 0, 0); \
9659                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9660                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9661                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
9662                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9663                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9664                IEM_MC_REF_EFLAGS(pEFlags); \
9665                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
9666                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9667                IEM_MC_END(); \
9668                break; \
9669 \
9670            case IEMMODE_32BIT: \
9671                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
9672                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9673                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9674                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
9675                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9676                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9677                IEM_MC_REF_EFLAGS(pEFlags); \
9678                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
9679                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9680                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9681                IEM_MC_END(); \
9682                break; \
9683 \
9684            case IEMMODE_64BIT: \
9685                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
9686                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9687                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9688                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
9689                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9690                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9691                IEM_MC_REF_EFLAGS(pEFlags); \
9692                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
9693                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9694                IEM_MC_END(); \
9695                break; \
9696 \
9697            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9698        } \
9699    } \
9700    else \
9701    { \
9702        /* memory */ \
9703        switch (pVCpu->iem.s.enmEffOpSize) \
9704        { \
9705            case IEMMODE_16BIT: \
9706                IEM_MC_BEGIN(3, 3, 0, 0); \
9707                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9708                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
9709                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9710                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9711                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9712                \
9713                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9714                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9715                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9716                IEM_MC_FETCH_EFLAGS(EFlags); \
9717                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
9718                \
9719                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9720                IEM_MC_COMMIT_EFLAGS(EFlags); \
9721                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9722                IEM_MC_END(); \
9723                break; \
9724 \
9725            case IEMMODE_32BIT: \
9726                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
9727                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9728                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
9729                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9730                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9731                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9732                \
9733                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9734                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9735                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9736                IEM_MC_FETCH_EFLAGS(EFlags); \
9737                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
9738                \
9739                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9740                IEM_MC_COMMIT_EFLAGS(EFlags); \
9741                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9742                IEM_MC_END(); \
9743                break; \
9744 \
9745            case IEMMODE_64BIT: \
9746                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
9747                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9748                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
9749                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9750                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9751                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9752                \
9753                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9754                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9755                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9756                IEM_MC_FETCH_EFLAGS(EFlags); \
9757                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
9758                \
9759                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9760                IEM_MC_COMMIT_EFLAGS(EFlags); \
9761                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9762                IEM_MC_END(); \
9763                break; \
9764 \
9765            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9766        } \
9767    } (void)0
9768
9769/**
9770 * @opmaps grp2_d1
9771 * @opcode /0
9772 * @opflclass rotate_1
 *
 * ROL Ev,1 - expands GRP2_BODY_Ev_1 with the rol EFLAGS-variant worker table.
9773 */
9774FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_1, uint8_t, bRm)
9775{
9776    IEMOP_MNEMONIC2(M1, ROL, rol, Ev, 1, DISOPTYPE_HARMLESS, 0);
9777    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9778}
9779
9780
9781/**
9782 * @opmaps grp2_d1
9783 * @opcode /1
9784 * @opflclass rotate_1
 *
 * ROR Ev,1 - expands GRP2_BODY_Ev_1 with the ror EFLAGS-variant worker table.
9785 */
9786FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_1, uint8_t, bRm)
9787{
9788    IEMOP_MNEMONIC2(M1, ROR, ror, Ev, 1, DISOPTYPE_HARMLESS, 0);
9789    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9790}
9791
9792
9793/**
9794 * @opmaps grp2_d1
9795 * @opcode /2
9796 * @opflclass rotate_carry_1
 *
 * RCL Ev,1 - expands GRP2_BODY_Ev_1 with the rcl EFLAGS-variant worker table.
9797 */
9798FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_1, uint8_t, bRm)
9799{
9800    IEMOP_MNEMONIC2(M1, RCL, rcl, Ev, 1, DISOPTYPE_HARMLESS, 0);
9801    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9802}
9803
9804
9805/**
9806 * @opmaps grp2_d1
9807 * @opcode /3
9808 * @opflclass rotate_carry_1
 *
 * RCR Ev,1 - expands GRP2_BODY_Ev_1 with the rcr EFLAGS-variant worker table.
9809 */
9810FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_1, uint8_t, bRm)
9811{
9812    IEMOP_MNEMONIC2(M1, RCR, rcr, Ev, 1, DISOPTYPE_HARMLESS, 0);
9813    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9814}
9815
9816
9817/**
9818 * @opmaps grp2_d1
9819 * @opcode /4
9820 * @opflclass shift_1
 *
 * SHL Ev,1 - expands GRP2_BODY_Ev_1 with the shl EFLAGS-variant worker table.
9821 */
9822FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_1, uint8_t, bRm)
9823{
9824    IEMOP_MNEMONIC2(M1, SHL, shl, Ev, 1, DISOPTYPE_HARMLESS, 0);
9825    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9826}
9827
9828
9829/**
9830 * @opmaps grp2_d1
9831 * @opcode /5
9832 * @opflclass shift_1
 *
 * SHR Ev,1 - expands GRP2_BODY_Ev_1 with the shr EFLAGS-variant worker table.
9833 */
9834FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_1, uint8_t, bRm)
9835{
9836    IEMOP_MNEMONIC2(M1, SHR, shr, Ev, 1, DISOPTYPE_HARMLESS, 0);
9837    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9838}
9839
9840
9841/**
9842 * @opmaps grp2_d1
9843 * @opcode /7
9844 * @opflclass shift_1
 *
 * SAR Ev,1 - expands GRP2_BODY_Ev_1 with the sar EFLAGS-variant worker table.
9845 */
9846FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_1, uint8_t, bRm)
9847{
9848    IEMOP_MNEMONIC2(M1, SAR, sar, Ev, 1, DISOPTYPE_HARMLESS, 0);
9849    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
9850}
9851
9852#undef GRP2_BODY_Ev_1
9853
9854/**
9855 * @opcode 0xd1
 *
 * Group 2 "shift Ev,1" dispatcher: routes on the ModRM reg field to the
 * per-operation workers above.  /6 is an invalid encoding.
9856 */
9857FNIEMOP_DEF(iemOp_Grp2_Ev_1)
9858{
9859    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9860    switch (IEM_GET_MODRM_REG_8(bRm))
9861    {
9862        case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_1, bRm);
9863        case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_1, bRm);
9864        case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_1, bRm);
9865        case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_1, bRm);
9866        case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_1, bRm);
9867        case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_1, bRm);
9868        case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_1, bRm);
9869        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9870        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
9871    }
9872}
9873
9874
9875/**
9876 * @opcode 0xd2
 *
 * Group 2 byte shifts/rotates with the count taken from CL, dispatched on
 * the ModRM reg field.  /6 is an invalid encoding.
 * Note: the per-case \@opmaps tags below were corrected from grp2_d0 to
 * grp2_d2; this is the 0xd2 map (grp2_d0 is the Eb,1 form above).
9877 */
9878FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
9879{
9880    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9881
9882    /* Need to use a body macro here since the EFLAGS behaviour differs between
9883       the shifts, rotates and rotate w/ carry. Sigh. */
9884#define GRP2_BODY_Eb_CL(a_pImplExpr) \
9885    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
9886    if (IEM_IS_MODRM_REG_MODE(bRm)) \
9887    { \
9888        /* register */ \
9889        IEM_MC_BEGIN(3, 0, 0, 0); \
9890        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9891        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
9892        IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9893        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9894        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9895        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9896        IEM_MC_REF_EFLAGS(pEFlags); \
9897        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
9898        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9899        IEM_MC_END(); \
9900    } \
9901    else \
9902    { \
9903        /* memory */ \
9904        IEM_MC_BEGIN(3, 3, 0, 0); \
9905        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
9906        IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9907        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9908        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9909        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9910        \
9911        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9912        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9913        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9914        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9915        IEM_MC_FETCH_EFLAGS(EFlags); \
9916        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
9917        \
9918        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9919        IEM_MC_COMMIT_EFLAGS(EFlags); \
9920        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9921        IEM_MC_END(); \
9922    } (void)0
9923
9924    switch (IEM_GET_MODRM_REG_8(bRm))
9925    {
9926        /**
9927         * @opdone
9928         * @opmaps grp2_d2
9929         * @opcode /0
9930         * @opflclass rotate_count
9931         */
9932        case 0:
9933        {
9934            IEMOP_MNEMONIC2EX(rol_Eb_CL, "rol Eb,CL", M_CL, ROL, rol, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9935            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9936            break;
9937        }
9938        /**
9939         * @opdone
9940         * @opmaps grp2_d2
9941         * @opcode /1
9942         * @opflclass rotate_count
9943         */
9944        case 1:
9945        {
9946            IEMOP_MNEMONIC2EX(ror_Eb_CL, "ror Eb,CL", M_CL, ROR, ror, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9947            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9948            break;
9949        }
9950        /**
9951         * @opdone
9952         * @opmaps grp2_d2
9953         * @opcode /2
9954         * @opflclass rotate_carry_count
9955         */
9956        case 2:
9957        {
9958            IEMOP_MNEMONIC2EX(rcl_Eb_CL, "rcl Eb,CL", M_CL, RCL, rcl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9959            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9960            break;
9961        }
9962        /**
9963         * @opdone
9964         * @opmaps grp2_d2
9965         * @opcode /3
9966         * @opflclass rotate_carry_count
9967         */
9968        case 3:
9969        {
9970            IEMOP_MNEMONIC2EX(rcr_Eb_CL, "rcr Eb,CL", M_CL, RCR, rcr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9971            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9972            break;
9973        }
9974        /**
9975         * @opdone
9976         * @opmaps grp2_d2
9977         * @opcode /4
9978         * @opflclass shift_count
9979         */
9980        case 4:
9981        {
9982            IEMOP_MNEMONIC2EX(shl_Eb_CL, "shl Eb,CL", M_CL, SHL, shl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9983            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9984            break;
9985        }
9986        /**
9987         * @opdone
9988         * @opmaps grp2_d2
9989         * @opcode /5
9990         * @opflclass shift_count
9991         */
9992        case 5:
9993        {
9994            IEMOP_MNEMONIC2EX(shr_Eb_CL, "shr Eb,CL", M_CL, SHR, shr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9995            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9996            break;
9997        }
9998        /**
9999         * @opdone
10000         * @opmaps grp2_d2
10001         * @opcode /7
10002         * @opflclass shift_count
10003         */
10004        case 7:
10005        {
10006            IEMOP_MNEMONIC2EX(sar_Eb_CL, "sar Eb,CL", M_CL, SAR, sar, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
10007            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
10008            break;
10009        }
10010        /** @opdone */
10011        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10012        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
10013    }
10014#undef GRP2_BODY_Eb_CL
10015}
10016
10017
10018/* Need to use a body macro here since the EFLAGS behaviour differs between
10019   the shifts, rotates and rotate w/ carry. Sigh.

   Shared body for the 0xd3 "<shift> Ev,CL" workers below.  Expands with
   'bRm' and 'pVCpu' expected in scope; a_pImplExpr selects the size-
   dispatched worker table (PCIEMOPSHIFTSIZES).  Same structure as
   GRP2_BODY_Ev_1 except the shift count is fetched from CL instead of
   being the constant 1. */
10020#define GRP2_BODY_Ev_CL(a_pImplExpr) \
10021    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
10022    if (IEM_IS_MODRM_REG_MODE(bRm)) \
10023    { \
10024        /* register */ \
10025        switch (pVCpu->iem.s.enmEffOpSize) \
10026        { \
10027            case IEMMODE_16BIT: \
10028                IEM_MC_BEGIN(3, 0, 0, 0); \
10029                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10030                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10031                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
10032                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10033                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
10034                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10035                IEM_MC_REF_EFLAGS(pEFlags); \
10036                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
10037                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10038                IEM_MC_END(); \
10039                break; \
10040 \
10041            case IEMMODE_32BIT: \
10042                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
10043                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10044                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10045                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
10046                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10047                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
10048                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10049                IEM_MC_REF_EFLAGS(pEFlags); \
10050                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
10051                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
10052                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10053                IEM_MC_END(); \
10054                break; \
10055 \
10056            case IEMMODE_64BIT: \
10057                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
10058                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10059                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10060                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
10061                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10062                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
10063                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10064                IEM_MC_REF_EFLAGS(pEFlags); \
10065                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
10066                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10067                IEM_MC_END(); \
10068                break; \
10069 \
10070            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10071        } \
10072    } \
10073    else \
10074    { \
10075        /* memory */ \
10076        switch (pVCpu->iem.s.enmEffOpSize) \
10077        { \
10078            case IEMMODE_16BIT: \
10079                IEM_MC_BEGIN(3, 3, 0, 0); \
10080                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10081                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
10082                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
10083                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10084                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10085                \
10086                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10087                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10088                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
10089                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10090                IEM_MC_FETCH_EFLAGS(EFlags); \
10091                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
10092                \
10093                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10094                IEM_MC_COMMIT_EFLAGS(EFlags); \
10095                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10096                IEM_MC_END(); \
10097                break; \
10098 \
10099            case IEMMODE_32BIT: \
10100                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
10101                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10102                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
10103                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
10104                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10105                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10106                \
10107                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10108                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10109                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
10110                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10111                IEM_MC_FETCH_EFLAGS(EFlags); \
10112                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
10113                \
10114                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10115                IEM_MC_COMMIT_EFLAGS(EFlags); \
10116                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10117                IEM_MC_END(); \
10118                break; \
10119 \
10120            case IEMMODE_64BIT: \
10121                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
10122                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10123                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
10124                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
10125                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10126                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10127                \
10128                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10129                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10130                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
10131                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10132                IEM_MC_FETCH_EFLAGS(EFlags); \
10133                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
10134                \
10135                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10136                IEM_MC_COMMIT_EFLAGS(EFlags); \
10137                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10138                IEM_MC_END(); \
10139                break; \
10140 \
10141            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10142        } \
10143    } (void)0
10144
10145
10146/**
10147 * @opmaps grp2_d3
10148 * @opcode /0
10149 * @opflclass rotate_count
 *
 * ROL Ev,CL - expands GRP2_BODY_Ev_CL with the rol EFLAGS-variant worker
 * table.  (\@opmaps corrected from grp2_d0: this is the 0xd3 map.)
10150 */
10151FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_CL, uint8_t, bRm)
10152{
10153    IEMOP_MNEMONIC2EX(rol_Ev_CL, "rol Ev,CL", M_CL, ROL, rol, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10154    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
10155}
10156
10157
10158/**
10159 * @opmaps grp2_d3
10160 * @opcode /1
10161 * @opflclass rotate_count
 *
 * ROR Ev,CL - expands GRP2_BODY_Ev_CL with the ror EFLAGS-variant worker
 * table.  (\@opmaps corrected from grp2_d0: this is the 0xd3 map.)
10162 */
10163FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_CL, uint8_t, bRm)
10164{
10165    IEMOP_MNEMONIC2EX(ror_Ev_CL, "ror Ev,CL", M_CL, ROR, ror, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10166    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
10167}
10168
10169
10170/**
10171 * @opmaps grp2_d3
10172 * @opcode /2
10173 * @opflclass rotate_carry_count
 *
 * RCL Ev,CL - expands GRP2_BODY_Ev_CL with the rcl EFLAGS-variant worker
 * table.  (\@opmaps corrected from grp2_d0: this is the 0xd3 map.)
10174 */
10175FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_CL, uint8_t, bRm)
10176{
10177    IEMOP_MNEMONIC2EX(rcl_Ev_CL, "rcl Ev,CL", M_CL, RCL, rcl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10178    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
10179}
10180
10181
10182/**
10183 * @opmaps grp2_d3
10184 * @opcode /3
10185 * @opflclass rotate_carry_count
 *
 * RCR Ev,CL - expands GRP2_BODY_Ev_CL with the rcr EFLAGS-variant worker
 * table.  (\@opmaps corrected from grp2_d0: this is the 0xd3 map.)
10186 */
10187FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_CL, uint8_t, bRm)
10188{
10189    IEMOP_MNEMONIC2EX(rcr_Ev_CL, "rcr Ev,CL", M_CL, RCR, rcr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10190    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
10191}
10192
10193
10194/**
10195 * @opmaps grp2_d3
10196 * @opcode /4
10197 * @opflclass shift_count
 *
 * SHL Ev,CL - expands GRP2_BODY_Ev_CL with the shl EFLAGS-variant worker
 * table.  (\@opmaps corrected from grp2_d0: this is the 0xd3 map.)
10198 */
10199FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_CL, uint8_t, bRm)
10200{
10201    IEMOP_MNEMONIC2EX(shl_Ev_CL, "shl Ev,CL", M_CL, SHL, shl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10202    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
10203}
10204
10205
10206/**
10207 * @opmaps grp2_d3
10208 * @opcode /5
10209 * @opflclass shift_count
 *
 * SHR Ev,CL - expands GRP2_BODY_Ev_CL with the shr EFLAGS-variant worker
 * table.  (\@opmaps corrected from grp2_d0: this is the 0xd3 map.)
10210 */
10211FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_CL, uint8_t, bRm)
10212{
10213    IEMOP_MNEMONIC2EX(shr_Ev_CL, "shr Ev,CL", M_CL, SHR, shr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10214    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
10215}
10216
10217
10218/**
10219 * @opmaps grp2_d3
10220 * @opcode /7
10221 * @opflclass shift_count
 *
 * SAR Ev,CL - expands GRP2_BODY_Ev_CL with the sar EFLAGS-variant worker
 * table.  (\@opmaps corrected from grp2_d0: this is the 0xd3 map.)
10222 */
10223FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_CL, uint8_t, bRm)
10224{
10225    IEMOP_MNEMONIC2EX(sar_Ev_CL, "sar Ev,CL", M_CL, SAR, sar, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
10226    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
10227}
10228
10229#undef GRP2_BODY_Ev_CL
10230
10231/**
10232 * @opcode 0xd3
 *
 * Group 2 "shift Ev,CL" dispatcher: routes on the ModRM reg field to the
 * per-operation workers above.  /6 is an invalid encoding.
10233 */
10234FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
10235{
10236    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10237    switch (IEM_GET_MODRM_REG_8(bRm))
10238    {
10239        case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_CL, bRm);
10240        case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_CL, bRm);
10241        case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_CL, bRm);
10242        case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_CL, bRm);
10243        case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_CL, bRm);
10244        case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_CL, bRm);
10245        case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_CL, bRm);
10246        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10247        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
10248    }
10249}
10250
10251
10252/**
10253 * @opcode 0xd4
10254 * @opflmodify cf,pf,af,zf,sf,of
10255 * @opflundef cf,af,of
 *
 * AAM Ib - ASCII adjust AX after multiply, using the immediate as the base.
 * A zero immediate raises \#DE here at decode time, before deferring to
 * iemCImpl_aam.  Invalid in 64-bit mode.  Flush mask: xAX only.
10256 */
10257FNIEMOP_DEF(iemOp_aam_Ib)
10258{
10259/** @todo testcase: aam */
10260    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
10261    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
10262    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10263    IEMOP_HLP_NO_64BIT();
10264    if (!bImm)
10265        IEMOP_RAISE_DIVIDE_ERROR_RET(); /* aam 0 => #DE, per the division by the immediate. */
10266    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aam, bImm);
10267}
10268
10269
10270/**
10271 * @opcode 0xd5
10272 * @opflmodify cf,pf,af,zf,sf,of
10273 * @opflundef cf,af,of
 *
 * AAD Ib - ASCII adjust AX before division, using the immediate as the base.
 * Unlike AAM there is no divide, hence no \#DE check.  Invalid in 64-bit
 * mode.  Flush mask: xAX only.
10274 */
10275FNIEMOP_DEF(iemOp_aad_Ib)
10276{
10277/** @todo testcase: aad? */
10278    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
10279    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
10280    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10281    IEMOP_HLP_NO_64BIT();
10282    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aad, bImm);
10283}
10284
10285
10286/**
10287 * @opcode 0xd6
 *
 * SALC - set AL from carry: AL = CF ? 0xff : 0x00.  Not available in
 * 64-bit mode (IEMOP_HLP_NO_64BIT).
10288 */
10289FNIEMOP_DEF(iemOp_salc)
10290{
10291    IEMOP_MNEMONIC(salc, "salc");
10292    IEMOP_HLP_NO_64BIT();
10293
10294    IEM_MC_BEGIN(0, 0, 0, 0);
10295    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10296    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
10297        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
10298    } IEM_MC_ELSE() {
10299        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
10300    } IEM_MC_ENDIF();
10301    IEM_MC_ADVANCE_RIP_AND_FINISH();
10302    IEM_MC_END();
10303}
10304
10305
10306/**
10307 * @opcode 0xd7
 *
 * XLAT - table lookup: AL = byte at iEffSeg:(xBX + zero-extended AL),
 * with the address width selected by the effective address mode.
10308 */
10309FNIEMOP_DEF(iemOp_xlat)
10310{
10311    IEMOP_MNEMONIC(xlat, "xlat");
10312    switch (pVCpu->iem.s.enmEffAddrMode)
10313    {
10314        case IEMMODE_16BIT:
10315            IEM_MC_BEGIN(2, 0, 0, 0);
10316            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10317            IEM_MC_LOCAL(uint8_t,  u8Tmp);
10318            IEM_MC_LOCAL(uint16_t, u16Addr);
10319            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX); /* u16Addr = zero-extended AL */
10320            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX); /* u16Addr += BX */
10321            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
10322            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
10323            IEM_MC_ADVANCE_RIP_AND_FINISH();
10324            IEM_MC_END();
10325            break;
10326
10327        case IEMMODE_32BIT:
10328            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
10329            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10330            IEM_MC_LOCAL(uint8_t,  u8Tmp);
10331            IEM_MC_LOCAL(uint32_t, u32Addr);
10332            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX); /* u32Addr = zero-extended AL */
10333            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX); /* u32Addr += EBX */
10334            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
10335            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
10336            IEM_MC_ADVANCE_RIP_AND_FINISH();
10337            IEM_MC_END();
10338            break;
10339
10340        case IEMMODE_64BIT:
10341            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
10342            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10343            IEM_MC_LOCAL(uint8_t,  u8Tmp);
10344            IEM_MC_LOCAL(uint64_t, u64Addr);
10345            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX); /* u64Addr = zero-extended AL */
10346            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX); /* u64Addr += RBX */
10347            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
10348            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
10349            IEM_MC_ADVANCE_RIP_AND_FINISH();
10350            IEM_MC_END();
10351            break;
10352
10353        IEM_NOT_REACHED_DEFAULT_CASE_RET();
10354    }
10355}
10356
10357
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * Raises \#NM/\#MF as appropriate; if either register is empty the stack
 * underflow path marks ST0 instead of calling the assembly helper.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10387
10388
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags.
 *
 * Used by compare-style instructions: only FSW is updated, no value register
 * is written.  On an empty register the underflow path uses UINT8_MAX since
 * there is no destination stack register to mark.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,          u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10418
10419
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping when done.
 *
 * Same as iemOpHlpFpuNoStore_st0_stN except the stack is popped after the
 * FSW update (or after recording the underflow).
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,          u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10449
10450
/** Opcode 0xd8 11/0.  Defers to the common ST0,STn worker with the fadd helper. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
10457
10458
/** Opcode 0xd8 11/1.  Defers to the common ST0,STn worker with the fmul helper. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
10465
10466
/** Opcode 0xd8 11/2.  Compare only: flags-only worker, no pop. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
10473
10474
/** Opcode 0xd8 11/3.  Same comparison helper as fcom, but pops afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
10481
10482
/** Opcode 0xd8 11/4.  Defers to the common ST0,STn worker with the fsub helper. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
10489
10490
/** Opcode 0xd8 11/5.  Reversed subtraction (STn - ST0 stored in ST0). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
10497
10498
/** Opcode 0xd8 11/6.  Defers to the common ST0,STn worker with the fdiv helper. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
10505
10506
/** Opcode 0xd8 11/7.  Reversed division (STn / ST0 stored in ST0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
10513
10514
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * The 32-bit float operand is fetched from memory before the FPU state is
 * prepared; the effective address is calculated before decoding completes,
 * matching the other memory-operand FPU workers in this file.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10550
10551
/** Opcode 0xd8 !11/0.  Defers to the common ST0,m32r worker with the fadd helper. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
10558
10559
/** Opcode 0xd8 !11/1.  Defers to the common ST0,m32r worker with the fmul helper. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
10566
10567
/** Opcode 0xd8 !11/2.
 *
 * Compares ST0 with a 32-bit float from memory; updates FSW only (no store,
 * no pop).  Open-coded rather than using iemOpHlpFpu_st0_m32r because only
 * flags are produced and the FSW update records the memory operand address. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10599
10600
/** Opcode 0xd8 !11/3.
 *
 * Same as iemOp_fcom_m32r but pops the stack after the FSW update / underflow
 * handling (the _THEN_POP macro variants). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10632
10633
/** Opcode 0xd8 !11/4.  Defers to the common ST0,m32r worker with the fsub helper. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
10640
10641
/** Opcode 0xd8 !11/5.  Reversed subtraction (m32r - ST0 stored in ST0). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
10648
10649
/** Opcode 0xd8 !11/6.  Defers to the common ST0,m32r worker with the fdiv helper. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
10656
10657
/** Opcode 0xd8 !11/7.  Reversed division (m32r / ST0 stored in ST0). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
10664
10665
/**
 * @opcode 0xd8
 *
 * First x87 escape byte.  Records the FPU opcode word (low 11 bits: modrm +
 * low 3 bits of the escape byte) and dispatches on the modrm reg field:
 * register form (mod == 3) operates on ST0,STn; memory form on ST0,m32real.
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10705
10706
/** Opcode 0xd9 /0 mem32real
 *
 * Loads a 32-bit float from memory, converts it to 80-bit and pushes it.
 * ST7 must be empty (it becomes the new top), otherwise push overflow is
 * recorded instead.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10737
10738
/** Opcode 0xd9 !11/2 mem32real
 *
 * Stores ST0 to memory as a 32-bit float.  The destination is mapped for
 * write before the store; if ST0 is empty and FCW.IM is set a negative QNaN
 * is written instead, otherwise the mapping is rolled back so the memory is
 * left untouched (masked vs. unmasked invalid-operation behavior). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,    1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,  2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit honours pending unmasked exceptions reported in u16Fsw. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10776
10777
/** Opcode 0xd9 !11/3
 *
 * Same as iemOp_fst_m32r but pops the stack afterwards (the _THEN_POP macro
 * variants in both the store and underflow paths). */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,    1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,  2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            /* Masked underflow: write default QNaN, still pop below. */
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10815
10816
/** Opcode 0xd9 !11/4
 *
 * Loads the FPU environment (14 or 28 bytes depending on operand size) via
 * the C implementation helper, which handles the real/protected 16/32-bit
 * environment layouts. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG_CONST(uint8_t,           iEffSeg,      /*=*/ pVCpu->iem.s.iEffSeg,       1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw),
                        iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
10835
10836
10837/** Opcode 0xd9 !11/5 */
10838FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
10839{
10840 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
10841 IEM_MC_BEGIN(1, 1, 0, 0);
10842 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10843 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10844
10845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10846 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10847 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10848
10849 IEM_MC_ARG(uint16_t, u16Fsw, 0);
10850 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10851
10852 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw),
10853 iemCImpl_fldcw, u16Fsw);
10854 IEM_MC_END();
10855}
10856
10857
/** Opcode 0xd9 !11/6
 *
 * Stores the FPU environment (14 or 28 bytes depending on operand size) via
 * the C implementation helper.  This is the no-wait form (fnstenv); the
 * dirtied FCW/FSW shadow registers are flagged for the native recompiler. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG_CONST(uint8_t,           iEffSeg,      /*=*/ pVCpu->iem.s.iEffSeg,       1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
10876
10877
/** Opcode 0xd9 !11/7
 *
 * Stores the current FPU control word to memory; simple enough to be done
 * inline without a C implementation helper. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
10894
10895
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 *
 * FNOP: does nothing except the usual \#NM/\#MF checks and updating the FPU
 * opcode/instruction pointer bookkeeping. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
10911
10912
/** Opcode 0xd9 11/0 stN
 *
 * Pushes a copy of STn onto the stack.  The source is read before the top is
 * moved; push underflow is recorded if STn is empty. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10937
10938
/** Opcode 0xd9 11/3 stN
 *
 * Exchanges ST0 and STn.  The happy path swaps the two registers inline
 * (clearing C1 via the FpuRes FSW); the underflow path is deferred to a C
 * implementation helper which handles the masked/unmasked cases. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_CONST(uint8_t,           iStReg,     /*=*/ IEM_GET_MODRM_RM_8(bRm),     0);
    IEM_MC_ARG_CONST(uint16_t,          uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode,     1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10967
10968
/** Opcode 0xd9 11/4, 0xdd 11/2.
 *
 * Copies ST0 to STn and pops.  The iDstReg == 0 special case is handled
 * separately: no copy is needed, only a pop (used as 'ffreep st0'). */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t,        u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11015
11016
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * Raises \#NM/\#MF as appropriate; marks ST0 underflow if it is empty.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,   FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11044
11045
/** Opcode 0xd9 0xe0.  Negates ST0 via the unary ST0 worker. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
11052
11053
/** Opcode 0xd9 0xe1.  Absolute value of ST0 via the unary ST0 worker. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
11060
11061
/** Opcode 0xd9 0xe4.
 *
 * Compares ST0 against 0.0, updating only the FSW condition code bits. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,          u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw,   u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value,  1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11085
11086
/** Opcode 0xd9 0xe5.
 *
 * Classifies ST0 into the FSW condition codes.  Unlike the other workers this
 * references the register unconditionally (IEM_MC_REF_FPUREG): fxam must also
 * classify an empty register, so there is no empty/underflow branch. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,          u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw,   u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value,  1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11107
11108
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * ST7 must be empty (it becomes the new top after the push); otherwise push
 * overflow is recorded instead of calling the assembly helper.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEM_MC_BEGIN(1, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11134
11135
/** Opcode 0xd9 0xe8.  Pushes +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
11142
11143
/** Opcode 0xd9 0xe9.  Pushes log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
11150
11151
/** Opcode 0xd9 0xea.  Pushes log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
11158
/** Opcode 0xd9 0xeb.  Pushes pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
11165
11166
/** Opcode 0xd9 0xec.  Pushes log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
11173
/** Opcode 0xd9 0xed.  Pushes ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
11180
11181
/** Opcode 0xd9 0xee.  Pushes +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
11188
11189
/** Opcode 0xd9 0xf0.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
11203
11204
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * Note the operand order: STn is the first (destination) operand, ST0 the
 * second - the reverse of iemOpHlpFpu_st0_stN.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11235
11236
/** Opcode 0xd9 0xf1.  ST1 = ST1 * log2(ST0), then pop (bRm hardcoded to 1). */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
11243
11244
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * If ST0 is empty, push underflow (two-result variant) is recorded instead.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULTTWO,           FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO,  pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value,  1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11272
11273
/** Opcode 0xd9 0xf2 - fptan: replaces ST0 and pushes a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
11280
11281
/** Opcode 0xd9 0xf3 - fpatan: result stored in ST1, stack popped. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
11288
11289
/** Opcode 0xd9 0xf4 - fxtract: replaces ST0 and pushes a second result. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
11296
11297
/** Opcode 0xd9 0xf5 - fprem1: ST0 updated from ST0 and ST1, no pop. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
11304
11305
/** Opcode 0xd9 0xf6 - fdecstp: decrements the FPU stack TOP pointer only. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    /* Only the FSW condition bits change; no register content is touched. */
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11326
11327
/** Opcode 0xd9 0xf7 - fincstp: increments the FPU stack TOP pointer only. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    /* Only the FSW condition bits change; no register content is touched. */
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11348
11349
/** Opcode 0xd9 0xf8 - fprem: ST0 updated from ST0 and ST1, no pop. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
11356
11357
/** Opcode 0xd9 0xf9 - fyl2xp1: result stored in ST1, stack popped. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
11364
11365
/** Opcode 0xd9 0xfa - fsqrt: unary operation on ST0 in place. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
11372
11373
/** Opcode 0xd9 0xfb - fsincos: replaces ST0 and pushes a second result. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
11380
11381
/** Opcode 0xd9 0xfc - frndint: unary operation on ST0 in place. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
11388
11389
/** Opcode 0xd9 0xfd - fscale: ST0 updated from ST0 and ST1, no pop. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
11396
11397
/** Opcode 0xd9 0xfe - fsin: unary operation on ST0 in place. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
11404
11405
/** Opcode 0xd9 0xff - fcos: unary operation on ST0 in place. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
11412
11413
/** Used by iemOp_EscF1.
 * Dispatch table for the register-form 0xd9 opcodes with reg >= 4; indexed by
 * (bRm - 0xe0), so entry 0 corresponds to mod R/M byte 0xe0. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
11450
11451
11452/**
11453 * @opcode 0xd9
11454 */
11455FNIEMOP_DEF(iemOp_EscF1)
11456{
11457 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11458 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
11459
11460 if (IEM_IS_MODRM_REG_MODE(bRm))
11461 {
11462 switch (IEM_GET_MODRM_REG_8(bRm))
11463 {
11464 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
11465 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
11466 case 2:
11467 if (bRm == 0xd0)
11468 return FNIEMOP_CALL(iemOp_fnop);
11469 IEMOP_RAISE_INVALID_OPCODE_RET();
11470 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
11471 case 4:
11472 case 5:
11473 case 6:
11474 case 7:
11475 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
11476 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
11477 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11478 }
11479 }
11480 else
11481 {
11482 switch (IEM_GET_MODRM_REG_8(bRm))
11483 {
11484 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
11485 case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
11486 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
11487 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
11488 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
11489 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
11490 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
11491 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
11492 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11493 }
11494 }
11495}
11496
11497
/** Opcode 0xda 11/0 - fcmovb: copy ST(i) into ST0 when CF is set (below). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; otherwise record underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11522
11523
/** Opcode 0xda 11/1 - fcmove: copy ST(i) into ST0 when ZF is set (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; otherwise record underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11548
11549
/** Opcode 0xda 11/2 - fcmovbe: copy ST(i) into ST0 when CF or ZF is set
 *  (below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; otherwise record underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11574
11575
/** Opcode 0xda 11/3 - fcmovu: copy ST(i) into ST0 when PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; otherwise record underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11600
11601
11602/**
11603 * Common worker for FPU instructions working on ST0 and ST1, only affecting
11604 * flags, and popping twice when done.
11605 *
11606 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11607 */
11608FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
11609{
11610 IEM_MC_BEGIN(3, 1, 0, 0);
11611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11612 IEM_MC_LOCAL(uint16_t, u16Fsw);
11613 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11614 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11615 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
11616
11617 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11618 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11619
11620 IEM_MC_PREPARE_FPU_USAGE();
11621 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
11622 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
11623 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
11624 } IEM_MC_ELSE() {
11625 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
11626 } IEM_MC_ENDIF();
11627 IEM_MC_ADVANCE_RIP_AND_FINISH();
11628
11629 IEM_MC_END();
11630}
11631
11632
/** Opcode 0xda 0xe9 - fucompp: unordered compare ST0 with ST1, pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
11639
11640
11641/**
11642 * Common worker for FPU instructions working on ST0 and an m32i, and storing
11643 * the result in ST0.
11644 *
11645 * @param bRm Mod R/M byte.
11646 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11647 */
11648FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
11649{
11650 IEM_MC_BEGIN(3, 3, 0, 0);
11651 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11652 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11653 IEM_MC_LOCAL(int32_t, i32Val2);
11654 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11655 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11656 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
11657
11658 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11660
11661 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11662 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11663 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11664
11665 IEM_MC_PREPARE_FPU_USAGE();
11666 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11667 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
11668 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
11669 } IEM_MC_ELSE() {
11670 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11671 } IEM_MC_ENDIF();
11672 IEM_MC_ADVANCE_RIP_AND_FINISH();
11673
11674 IEM_MC_END();
11675}
11676
11677
/** Opcode 0xda !11/0 - fiadd: ST0 += m32i. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
11684
11685
/** Opcode 0xda !11/1 - fimul: ST0 *= m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
11692
11693
/** Opcode 0xda !11/2 - ficom: compare ST0 with m32i, FSW flags only, no pop. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* The memory operand address is recorded as FPU DP alongside the FSW. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11725
11726
/** Opcode 0xda !11/3 - ficomp: compare ST0 with m32i, FSW flags only, pop. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        /* Same comparison worker as FICOM; only the pop afterwards differs. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11758
11759
/** Opcode 0xda !11/4 - fisub: ST0 -= m32i. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
11766
11767
/** Opcode 0xda !11/5 - fisubr: ST0 = m32i - ST0 (reversed subtraction). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
11774
11775
/** Opcode 0xda !11/6 - fidiv: ST0 /= m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
11782
11783
/** Opcode 0xda !11/7 - fidivr: ST0 = m32i / ST0 (reversed division). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
11790
11791
11792/**
11793 * @opcode 0xda
11794 */
11795FNIEMOP_DEF(iemOp_EscF2)
11796{
11797 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11798 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
11799 if (IEM_IS_MODRM_REG_MODE(bRm))
11800 {
11801 switch (IEM_GET_MODRM_REG_8(bRm))
11802 {
11803 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
11804 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
11805 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
11806 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
11807 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
11808 case 5:
11809 if (bRm == 0xe9)
11810 return FNIEMOP_CALL(iemOp_fucompp);
11811 IEMOP_RAISE_INVALID_OPCODE_RET();
11812 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11813 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11814 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11815 }
11816 }
11817 else
11818 {
11819 switch (IEM_GET_MODRM_REG_8(bRm))
11820 {
11821 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
11822 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
11823 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
11824 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
11825 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
11826 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
11827 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
11828 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
11829 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11830 }
11831 }
11832}
11833
11834
/** Opcode 0xdb !11/0 - fild: convert m32i to r80 and push it. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST7 must be free to receive the pushed value; otherwise it is a
       push overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11865
11866
/** Opcode 0xdb !11/1 - fisttp: store ST0 to m32i with truncation, pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination for writing up front so memory faults are taken
       before any FPU state is modified. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: with IE masked write the integer indefinite value,
           otherwise roll the mapping back; record underflow either way. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11904
11905
/** Opcode 0xdb !11/2 - fist: store ST0 to m32i (rounds per FCW), no pop. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination for writing up front so memory faults are taken
       before any FPU state is modified. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: with IE masked write the integer indefinite value,
           otherwise roll the mapping back; record underflow either way. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11943
11944
/** Opcode 0xdb !11/3 - fistp: store ST0 to m32i (rounds per FCW), pop. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination for writing up front so memory faults are taken
       before any FPU state is modified. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: with IE masked write the integer indefinite value,
           otherwise roll the mapping back; record underflow either way. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11982
11983
/** Opcode 0xdb !11/5 - fld: load an 80-bit real from memory and push it. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST7 must be free to receive the pushed value; otherwise it is a
       push overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12014
12015
/** Opcode 0xdb !11/7 - fstp: store ST0 as 80-bit real to memory, pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination for writing up front so memory faults are taken
       before any FPU state is modified. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_MEM_MAP_R80_WO(pr80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: with IE masked store the negative QNaN (real indefinite),
           otherwise roll the mapping back; record underflow either way. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12053
12054
/** Opcode 0xdb 11/0 - fcmovnb: copy ST(i) into ST0 when CF is clear
 *  (not below). */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; otherwise record underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12079
12080
/** Opcode 0xdb 11/1 - fcmovne: copy ST(i) into ST0 when ZF is clear
 *  (not equal). */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; otherwise record underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12105
12106
/** Opcode 0xdb 11/2 - fcmovnbe: copy ST(i) into ST0 when both CF and ZF are
 *  clear (not below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; otherwise record underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12131
12132
/** Opcode 0xdb 11/3 - fcmovnu: copy ST(i) into ST0 when PF is clear
 *  (not unordered). */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; otherwise record underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12157
12158
/** Opcode 0xdb 0xe0 - fneni: 8087-only interrupt enable; executed as a NOP
 *  here (only the \#NM check is performed). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12169
12170
/** Opcode 0xdb 0xe1 - fndisi: 8087-only interrupt disable; executed as a NOP
 *  here (only the \#NM check is performed). */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12181
12182
/** Opcode 0xdb 0xe2 - fnclex: clear FPU exception bits in FSW without
 *  checking for pending exceptions first. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12195
12196
12197/** Opcode 0xdb 0xe3. */
12198FNIEMOP_DEF(iemOp_fninit)
12199{
12200 IEMOP_MNEMONIC(fninit, "fninit");
12201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12202 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
12203 iemCImpl_finit, false /*fCheckXcpts*/);
12204}
12205
12206
12207/** Opcode 0xdb 0xe4. */
12208FNIEMOP_DEF(iemOp_fnsetpm)
12209{
12210 IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
12211 IEM_MC_BEGIN(0, 0, 0, 0);
12212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12213 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12214 IEM_MC_ADVANCE_RIP_AND_FINISH();
12215 IEM_MC_END();
12216}
12217
12218
12219/** Opcode 0xdb 0xe5. */
12220FNIEMOP_DEF(iemOp_frstpm)
12221{
12222 IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
12223#if 0 /* #UDs on newer CPUs */
12224 IEM_MC_BEGIN(0, 0, 0, 0);
12225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12226 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12227 IEM_MC_ADVANCE_RIP_AND_FINISH();
12228 IEM_MC_END();
12229 return VINF_SUCCESS;
12230#else
12231 IEMOP_RAISE_INVALID_OPCODE_RET();
12232#endif
12233}
12234
12235
/** Opcode 0xdb 11/5.
 * FUCOMI - unordered compare ST(0) with ST(i), setting ZF/PF/CF.
 * Deferred to the shared fcomi/fucomi C implementation; the last parameter
 * packs the pop flag (zero, FUCOMI does not pop) with the FPU opcode word. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
12244
12245
12246/** Opcode 0xdb 11/6. */
12247FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
12248{
12249 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
12250 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
12251 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
12252 false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
12253}
12254
12255
/**
 * @opcode 0xdb
 *
 * Decode dispatcher for the 0xdb x87 escape byte: FCMOVcc/FNCLEX/FNINIT/
 * FCOMI/FUCOMI in register form, and 32-bit integer / 80-bit real memory
 * forms otherwise. Also latches the FOP value (ModRM byte + low 3 opcode
 * bits) for the instructions that record it.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7); /* FOP = modrm | (opcode & 7) << 8 */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* /4 encodes a set of single-byte administrative instructions. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
                    case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN,  bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12307
12308
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * Underflows (either register empty) are routed to the FPU stack underflow
 * handler targeting ST(i).
 *
 * @param   bRm         Mod R/M byte; the R/M field selects ST(i).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,     FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,          2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Operand 1 is ST(i), operand 2 is ST(0); the result goes back to ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12339
12340
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST(0) - forwards to the common STn,ST0 worker with the
 * 80-bit add implementation. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST(0) - note the r80_by_r80 "reversed" subtract worker. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
12387
12388
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * The memory operand is fetched before the FPU usage is prepared, so a \#PF
 * on the operand does not disturb the FPU state.
 *
 * @param   bRm         Mod R/M byte (memory form).
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Factor1,                1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U,   pr64Factor2,    r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        /* Result goes to ST(0); FDP/FDS recorded from the memory operand. */
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12423
12424
/** Opcode 0xdc !11/0.
 * FADD m64r - ST(0) += 64-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}


/** Opcode 0xdc !11/1.
 * FMUL m64r - ST(0) *= 64-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
12439
12440
/** Opcode 0xdc !11/2.
 * FCOM ST(0),m64r - compare ST(0) with a 64-bit real memory operand;
 * only FSW is updated, no register result is stored. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX = no stack register to mark in the underflow handling. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12472
12473
/** Opcode 0xdc !11/3.
 * FCOMP ST(0),m64r - same as FCOM m64r but pops the register stack
 * afterwards (note the _THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12505
12506
/** Opcode 0xdc !11/4.
 * FSUB m64r - ST(0) -= 64-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}


/** Opcode 0xdc !11/5.
 * FSUBR m64r - ST(0) = m64r - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}


/** Opcode 0xdc !11/6.
 * FDIV m64r - ST(0) /= 64-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}


/** Opcode 0xdc !11/7.
 * FDIVR m64r - ST(0) = m64r / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
12537
12538
/**
 * @opcode 0xdc
 *
 * Decode dispatcher for the 0xdc x87 escape byte: arithmetic on ST(i),ST(0)
 * in register form, and 64-bit real memory operands otherwise.
 */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7); /* latch FOP for this escape group */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12577
12578
/** Opcode 0xdd !11/0.
 * FLD m64r - push a 64-bit real memory operand onto the FPU stack
 * (converted to 80-bit).
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val,    r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) must be free for the push, otherwise it's a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12609
12610
/** Opcode 0xdd !11/1.
 * FISTTP m64i - store ST(0) as a 64-bit integer with truncation, then pop.
 * If ST(0) is empty and IM is masked, the integer indefinite (INT64_MIN)
 * is stored instead; otherwise the store is rolled back.
 * (Note: header previously said !11/0; the 0xdd dispatcher routes /1 here.) */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination writable up front so the AIMPL can store directly. */
    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(int64_t *,               pi64Dst,    1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,  2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw); /* commit depends on FSW exception bits */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12648
12649
/** Opcode 0xdd !11/2.
 * FST m64r - store ST(0) as a 64-bit real, no pop. On empty ST(0) with IM
 * masked a negative QNaN is stored; otherwise the mapping is rolled back.
 * (Note: header previously said !11/0; the 0xdd dispatcher routes /2 here.) */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,    1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,  2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12687
12688
12689
12690
/** Opcode 0xdd !11/3.
 * FSTP m64r - same as FST m64r but pops the register stack afterwards
 * (the _THEN_POP FSW/underflow variants).
 * (Note: header previously said !11/0; the 0xdd dispatcher routes /3 here.) */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,    1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,  2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12728
12729
/** Opcode 0xdd !11/4.
 * FRSTOR - restore the full FPU state (m94/108byte image depending on
 * operand size); deferred to the C implementation.
 * (Note: header previously said !11/0; the 0xdd dispatcher routes /4 here.) */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,              0);
    IEM_MC_ARG_CONST(uint8_t,           iEffSeg,      /*=*/ pVCpu->iem.s.iEffSeg,                   1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc)
    IEM_MC_END();
}
12748
12749
/** Opcode 0xdd !11/6.
 * FNSAVE - save the full FPU state (m94/108byte image) and then reset
 * the FPU; deferred to the C implementation.
 * (Note: header previously said !11/0; the 0xdd dispatcher routes /6 here.) */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */

    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,              0);
    IEM_MC_ARG_CONST(uint8_t,           iEffSeg,      /*=*/ pVCpu->iem.s.iEffSeg,                   1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst)
    IEM_MC_END();
}
12768
/** Opcode 0xdd !11/7.
 * FNSTSW m16 - store the FPU status word to memory (no-wait form, so no
 * pending-exception check).
 * (Note: header previously said !11/0; the 0xdd dispatcher routes /7 here.) */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2, 0, 0);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ(); /* read-only access, no change to FPU state */
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
12792
12793
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - mark register ST(i) as empty in the tag word. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
             unmodified. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm)); /* clears the tag for ST(i) */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12813
12814
/** Opcode 0xdd 11/2.
 * FST ST(i) - copy ST(0) into register ST(i), no pop.
 * (Note: header previously said 11/1; the 0xdd dispatcher routes reg /2 here.) */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value); /* wrap ST(0) as a result with no FSW changes */
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12837
12838
/** Opcode 0xdd 11/4.
 * FUCOM ST(0),ST(i) - unordered compare, FSW only, no store.
 * (Note: header previously said 11/3; the 0xdd dispatcher routes reg /4 here.) */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}


/** Opcode 0xdd 11/5.
 * FUCOMP ST(0),ST(i) - unordered compare, then pop.
 * (Note: header previously said 11/4; the 0xdd dispatcher routes reg /5 here.) */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
12853
12854
/**
 * @opcode 0xdd
 *
 * Decode dispatcher for the 0xdd x87 escape byte: FFREE/FST/FSTP/FUCOM(P)
 * in register form, and 64-bit real / save-restore / status word memory
 * forms otherwise.
 */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7); /* latch FOP for this escape group */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12893
12894
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0) - add then pop; forwards to the common STn,ST0 pop
 * worker with the 80-bit add implementation. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0) - multiply then pop.
 * (Note: header previously said 11/0; the 0xde dispatcher routes /1 here.) */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xde 0xd9.
 * FCOMPP - compare ST(0) with ST(1), then pop both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0) - reversed subtract then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0) - subtract then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0) - reversed divide then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0) - divide then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
12949
12950
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * The 16-bit signed integer operand is fetched before FPU usage is prepared,
 * so a \#PF on the operand does not disturb the FPU state.
 *
 * @param   bRm         Mod R/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,              FpuRes);
    IEM_MC_LOCAL(int16_t,                   i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,     pFpuRes,    FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,   pi16Val2,   i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode); /* result always to ST(0) */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12986
12987
/** Opcode 0xde !11/0.
 * FIADD m16i - ST(0) += 16-bit signed integer memory operand. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}


/** Opcode 0xde !11/1.
 * FIMUL m16i - ST(0) *= 16-bit signed integer memory operand. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
13002
13003
/** Opcode 0xde !11/2.
 * FICOM ST(0),m16i - compare ST(0) with a 16-bit signed integer memory
 * operand; only FSW is updated. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,                  u16Fsw);
    IEM_MC_LOCAL(int16_t,                   i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,        pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,   pi16Val2,   i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13035
13036
/** Opcode 0xde !11/3.
 * FICOMP ST(0),m16i - same as FICOM m16i but pops the register stack
 * afterwards (the _THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,                  u16Fsw);
    IEM_MC_LOCAL(int16_t,                   i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,        pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,   pi16Val2,   i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13068
13069
13070/** Opcode 0xde !11/4. */
13071FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
13072{
13073 IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
13074 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
13075}
13076
13077
/** Opcode 0xde !11/5.
 * FISUBR - ST(0) = m16i - ST(0) (reversed operands); uses the common worker. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
13084
13085
/** Opcode 0xde !11/6.
 * FIDIV - ST(0) = ST(0) / m16i; uses the common st0-op-m16i worker. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
13092
13093
/** Opcode 0xde !11/7.
 * FIDIVR - ST(0) = m16i / ST(0) (reversed operands); uses the common worker. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
13100
13101
/**
 * @opcode 0xde
 *
 * FPU escape 0xde: dispatches on the ModR/M byte.  Register mode (mod=3)
 * selects the "op ST(i),ST(0) and pop" forms; memory mode selects the
 * 16-bit-integer arithmetic forms.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (low 3 bits of the opcode + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9) /* Only DE D9 encodes FCOMPP; the rest of /3 is invalid. */
                        return FNIEMOP_CALL(iemOp_fcompp);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13142
13143
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp: marks
 * ST(i) as empty and then increments the FPU stack top pointer. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Free the addressed register, then pop by bumping TOP. */
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13163
13164
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - store the FPU status word into AX.  No-wait form: does not
 * check for pending FPU exceptions (no IEM_MC_MAYBE_RAISE_FPU_XCPT). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13179
13180
13181/** Opcode 0xdf 11/5. */
13182FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
13183{
13184 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
13185 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
13186 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
13187 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
13188}
13189
13190
/** Opcode 0xdf 11/6.
 * FCOMIP - ordered compare ST(0) with ST(i), set ZF/PF/CF, then pop.
 * fUCmp is false: the ordered compare raises \#IA on any NaN operand.
 * Bit 31 of the last argument requests the pop after comparing. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
13199
13200
/** Opcode 0xdf !11/0.
 * FILD m16i - convert a 16-bit signed integer from memory to 80-bit real
 * and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target is ST(7) relative to the current top (top - 1 after push). */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Target register occupied: stack overflow. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13231
13232
/** Opcode 0xdf !11/1.
 * FISTTP m16i - store ST(0) to memory as a 16-bit signed integer using
 * truncation (ignoring the rounding control), then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        /* Commit is conditional on the FSW result (unmasked exceptions roll back). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: with IM masked, store the integer indefinite value;
           otherwise roll back the mapping and raise underflow. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13270
13271
/** Opcode 0xdf !11/2.
 * FIST m16i - store ST(0) to memory as a 16-bit signed integer using the
 * current rounding mode; the stack is NOT popped. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        /* No pop here, unlike fistp/fisttp. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: masked IM writes the integer indefinite, otherwise roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13309
13310
/** Opcode 0xdf !11/3.
 * FISTP m16i - store ST(0) to memory as a 16-bit signed integer using the
 * current rounding mode, then pop the register stack. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: masked IM writes the integer indefinite, otherwise roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13348
13349
/** Opcode 0xdf !11/4.
 * FBLD m80bcd - load an 80-bit packed BCD value from memory, convert it to
 * 80-bit real and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target is ST(7) relative to the current top. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13380
13381
/** Opcode 0xdf !11/5.
 * FILD m64i - convert a 64-bit signed integer from memory to 80-bit real
 * and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target is ST(7) relative to the current top. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13412
13413
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd - store ST(0) to memory as an 80-bit packed BCD value,
 * then pop the register stack. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_MEM_MAP_D80_WO(pd80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: masked IM writes the BCD indefinite value, otherwise roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13451
13452
/** Opcode 0xdf !11/7.
 * FISTP m64i - store ST(0) to memory as a 64-bit signed integer using the
 * current rounding mode, then pop the register stack. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: masked IM writes the integer indefinite, otherwise roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13490
13491
/**
 * @opcode 0xdf
 *
 * FPU escape 0xdf: dispatches on the ModR/M byte.  Register mode covers the
 * undocumented FFREEP/FXCH/FSTP aliases plus FNSTSW AX and FUCOMIP/FCOMIP;
 * memory mode covers the 16-bit/64-bit integer and packed BCD forms.
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (low 3 bits of the opcode + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* Only DF E0 encodes FNSTSW AX; the rest of /4 is invalid. */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13532
13533
/**
 * @opcode 0xe0
 * @opfltest zf
 *
 * LOOPNE/LOOPNZ Jb - decrement xCX (width per effective address size) and
 * jump if the new count is non-zero AND ZF is clear.  xCX is decremented
 * on both the taken and not-taken paths.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The counter register is selected by the effective ADDRESS size. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13588
13589
/**
 * @opcode 0xe1
 * @opfltest zf
 *
 * LOOPE/LOOPZ Jb - decrement xCX (width per effective address size) and
 * jump if the new count is non-zero AND ZF is set.  xCX is decremented on
 * both the taken and not-taken paths.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The counter register is selected by the effective ADDRESS size. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13644
13645
/**
 * @opcode 0xe2
 *
 * LOOP Jb - decrement xCX (width per effective address size) and jump if
 * the new count is non-zero.  Includes a logging-only shortcut for the
 * self-branching "LOOP $-2" delay idiom.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */

    /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
     * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs.  Shortcutting
     * the loop causes guest crashes, but when logging it's nice to skip a few million
     * lines of useless output. */
#if defined(LOG_ENABLED)
    /* Only when verbose logging is active and the branch targets the instruction
       itself: clear xCX and fall through instead of spinning. */
    if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
#endif

    /* Regular path: counter width selected by the effective ADDRESS size. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NOT_ONE() {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                /* Count was 1: store zero directly instead of decrementing. */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NOT_ONE() {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NOT_ONE() {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13739
13740
/**
 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ Jb - jump if the counter register (width per effective
 * address size) is zero.  Does not modify the counter or any flags.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Condition is inverted: non-zero falls through, zero jumps. */
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13788
13789
/**
 * @opcode 0xe4
 * @opfltest iopl
 *
 * IN AL,Ib - read one byte from the immediate I/O port into AL.
 * Deferred to the C implementation; AL is the only register written.
 */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* 0x80 flags an immediate port form for the common iemCImpl_in worker. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
13802
13803
/**
 * @opcode 0xe5
 * @opfltest iopl
 *
 * IN eAX,Ib - read a word or dword (per effective operand size) from the
 * immediate I/O port into AX/EAX.
 */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Access width: 2 bytes for 16-bit operand size, else 4. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
13817
13818
/**
 * @opcode 0xe6
 * @opfltest iopl
 *
 * OUT Ib,AL - write AL to the immediate I/O port.  No guest registers are
 * modified (shadow-register mask is 0).
 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
13831
13832
/**
 * @opcode 0xe7
 * @opfltest iopl
 *
 * OUT Ib,eAX - write AX/EAX (per effective operand size) to the immediate
 * I/O port.
 */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Access width: 2 bytes for 16-bit operand size, else 4. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
13846
13847
/**
 * @opcode 0xe8
 *
 * CALL Jv - near relative call.  The immediate width follows the effective
 * operand size (sign-extended to 64-bit in long mode); deferred to the
 * per-width C implementations which push the return address.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* 64-bit mode still encodes a 32-bit displacement, sign-extended. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13881
13882
/**
 * @opcode 0xe9
 *
 * JMP Jv - near relative jump with a 16-bit or 32-bit displacement
 * depending on the effective operand size (64-bit mode uses the 32-bit
 * displacement form).
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT: /* 64-bit mode shares the sign-extended 32-bit displacement path. */
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13912
13913
/**
 * @opcode 0xea
 *
 * JMP Ap - direct far jump (ptr16:16 / ptr16:32).  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
    uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
                                iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
13935
13936
/**
 * @opcode 0xeb
 *
 * JMP Jb - short near relative jump with an 8-bit signed displacement.
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    IEM_MC_END();
}
13951
13952
/**
 * @opcode 0xec
 * @opfltest iopl
 *
 * IN AL,DX - read one byte from the I/O port in DX into AL.
 */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
13965
13966
/**
 * @opcode 0xed
 * @opfltest iopl
 *
 * IN eAX,DX - read a word or dword (per effective operand size) from the
 * I/O port in DX into AX/EAX.
 */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Access width: 2 bytes for 16-bit operand size, else 4. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
13980
13981
/**
 * @opcode 0xee
 * @opfltest iopl
 *
 * OUT DX,AL - write AL to the I/O port in DX.
 */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
13993
13994
/**
 * @opcode 0xef
 * @opfltest iopl
 *
 * OUT DX,eAX - write AX/EAX (per effective operand size) to the I/O port
 * in DX.
 */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Access width: 2 bytes for 16-bit operand size, else 4. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
14007
14008
/**
 * @opcode 0xf0
 *
 * LOCK prefix - records the prefix flag and continues decoding the next
 * opcode byte through the one-byte map.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
14020
14021
/**
 * @opcode 0xf1
 *
 * INT1/ICEBP - raises \#DB via the common software-interrupt C
 * implementation (IEMINT_INT1 distinguishes it from INT 1).
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    /** @todo Does not generate \#UD on 286, or so they say...  Was allegedly a
     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
     * LOADALL memo.  Needs some testing. */
    IEMOP_HLP_MIN_386();
    /** @todo testcase! */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
}
14037
14038
/**
 * REPNE/REPNZ prefix - records the prefix and decodes the following opcode
 * byte through the one byte opcode map.
 *
 * @opcode 0xf2
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    /* Continue decoding with the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
14056
14057
/**
 * REPE/REPZ prefix - records the prefix and decodes the following opcode
 * byte through the one byte opcode map.
 *
 * @opcode 0xf3
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    /* Continue decoding with the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
14075
14076
/**
 * HLT - halts the CPU, deferring to the C implementation and ending the
 * current translation block.
 *
 * @opcode 0xf4
 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_MNEMONIC(hlt, "hlt");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_hlt);
}
14086
14087
/**
 * CMC - complements (flips) the carry flag.
 *
 * @opcode      0xf5
 * @opflmodify  cf
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
14101
14102
/**
 * Body for of 'inc/dec/not/neg Eb': a read-modify-write unary operation on a
 * byte-sized register or memory operand.
 *
 * The register form calls a_fnNormalU8 on a direct register reference.  The
 * memory form maps the byte read-write and calls a_fnNormalU8, or - when a
 * LOCK prefix is present and not disregarded - maps it atomically and calls
 * a_fnLockedU8 instead.  EFLAGS are fetched before and committed after the
 * operation in the memory forms.
 */
#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        /* register access */ \
        IEM_MC_BEGIN(2, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst, 0); \
        IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(2, 2, 0, 0); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,          0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(2, 2, 0, 0); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,          0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
14163
14164
/**
 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
 *
 * Handles the register forms and the non-locked memory forms for all three
 * effective operand sizes.  Note that this macro deliberately ends inside an
 * open 'else { ... }' for the locked memory path; it must always be followed
 * by IEMOP_BODY_UNARY_Ev_LOCKED, which supplies that path and the closing
 * braces.
 */
#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(2, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,  pu16Dst, 0); \
                IEM_MC_ARG(uint32_t *,  pEFlags, 1); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,  pu32Dst, 0); \
                IEM_MC_ARG(uint32_t *,  pEFlags, 1); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,  pu64Dst, 0); \
                IEM_MC_ARG(uint32_t *,  pEFlags, 1); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
14288
/**
 * Locked memory forms for 'inc/dec/not/neg Ev'.
 *
 * Completes the 'else' branch left open by IEMOP_BODY_UNARY_Ev: maps the
 * operand atomically and calls the locked worker for the effective operand
 * size, then supplies the closing braces for both macros.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
14354
14355
/**
 * TEST Eb,Ib - ANDs the byte operand with the immediate and sets flags
 * without writing back the result.
 *
 * @opmaps      grp3_f6
 * @opcode      /0
 * @opflclass   logical
 * @todo also /1
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(3, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 3, 0, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);

        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        /* TEST only reads the destination, so the memory operand is mapped
           read-only. */
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);
        IEM_MC_ARG(uint8_t const *, pu8Dst, 0);
        IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

        IEM_MC_ARG_CONST(uint8_t,   u8Src, u8Imm,    1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
14407
14408
/* Body for opcode 0xf6 variations /4, /5, /6 and /7 (MUL/IMUL/DIV/IDIV Eb).
   The byte operand is combined with AX via the worker a_pfnU8Expr; a
   non-zero return from the worker raises \#DE (divide error). */
#define IEMOP_GRP3_MUL_DIV_EB(bRm, a_pfnU8Expr) \
    PFNIEMAIMPLMULDIVU8 const pfnU8 = (a_pfnU8Expr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register access */ \
        IEM_MC_BEGIN(3, 1, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint16_t *,      pu16AX,  0); \
        IEM_MC_ARG(uint8_t,         u8Value, 1); \
        IEM_MC_ARG(uint32_t *,      pEFlags, 2); \
        IEM_MC_LOCAL(int32_t,       rc); \
        \
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags); \
        IEM_MC_IF_LOCAL_IS_Z(rc) { \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        } IEM_MC_ELSE() { \
            IEM_MC_RAISE_DIVIDE_ERROR(); \
        } IEM_MC_ENDIF(); \
        \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        IEM_MC_BEGIN(3, 2, 0, 0); \
        IEM_MC_ARG(uint16_t *,      pu16AX,  0); \
        IEM_MC_ARG(uint8_t,         u8Value, 1); \
        IEM_MC_ARG(uint32_t *,      pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
        IEM_MC_LOCAL(int32_t,       rc); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags); \
        IEM_MC_IF_LOCAL_IS_Z(rc) { \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        } IEM_MC_ELSE() { \
            IEM_MC_RAISE_DIVIDE_ERROR(); \
        } IEM_MC_ENDIF(); \
        \
        IEM_MC_END(); \
    } (void)0
14458
14459
/* Body for opcode 0xf7 variant /4, /5, /6 and /7 (MUL/IMUL/DIV/IDIV Ev).
   The operand is combined with the eDX:eAX register pair via the size
   specific worker from a_pImplExpr; a non-zero worker return raises \#DE.
   The 32-bit register forms clear the high halves of RAX/RDX on success. */
#define IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, a_pImplExpr) \
    PCIEMOPMULDIVSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register access */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(4, 1, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16AX,   0); \
                IEM_MC_ARG(uint16_t *,      pu16DX,   1); \
                IEM_MC_ARG(uint16_t,        u16Value, 2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,  3); \
                IEM_MC_LOCAL(int32_t,       rc); \
                \
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(4, 1, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32AX,   0); \
                IEM_MC_ARG(uint32_t *,      pu32DX,   1); \
                IEM_MC_ARG(uint32_t,        u32Value, 2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,  3); \
                IEM_MC_LOCAL(int32_t,       rc); \
                \
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64AX,   0); \
                IEM_MC_ARG(uint64_t *,      pu64DX,   1); \
                IEM_MC_ARG(uint64_t,        u64Value, 2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,  3); \
                IEM_MC_LOCAL(int32_t,       rc); \
                \
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory access. */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(4, 2, 0, 0); \
                IEM_MC_ARG(uint16_t *,      pu16AX,   0); \
                IEM_MC_ARG(uint16_t *,      pu16DX,   1); \
                IEM_MC_ARG(uint16_t,        u16Value, 2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,  3); \
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                IEM_MC_LOCAL(int32_t,       rc); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(4, 2, IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *,      pu32AX,   0); \
                IEM_MC_ARG(uint32_t *,      pu32DX,   1); \
                IEM_MC_ARG(uint32_t,        u32Value, 2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,  3); \
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                IEM_MC_LOCAL(int32_t,       rc); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(4, 2, IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *,      pu64AX,   0); \
                IEM_MC_ARG(uint64_t *,      pu64DX,   1); \
                IEM_MC_ARG(uint64_t,        u64Value, 2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,  3); \
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                IEM_MC_LOCAL(int32_t,       rc); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
14627
14628
/**
 * NOT Eb - ones complement negation of a byte register or memory operand.
 *
 * @opmaps      grp3_f6
 * @opcode      /2
 * @opflclass   unchanged
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
{
/** @todo does not modify EFLAGS. */
    IEMOP_MNEMONIC(not_Eb, "not Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
}
14640
14641
14642/**
14643 * @opmaps grp3_f6
14644 * @opcode /3
14645 * @opflclass arithmetic
14646 */
14647FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
14648{
14649 IEMOP_MNEMONIC(net_Eb, "neg Eb");
14650 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
14651}
14652
14653
/**
 * Group 3 byte forms: dispatches on the ModR/M reg field.  /0 and /1 both
 * decode as TEST Eb,Ib here (/1 treated as an alias of /0); /2 is NOT,
 * /3 is NEG, and /4../7 are handled inline below.
 *
 * @opcode 0xf6
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 2:
            return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
        case 4:
        {
            /**
             * @opdone
             * @opmaps      grp3_f6
             * @opcode      /4
             * @opflclass   multiply
             */
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
            break;
        }
        case 5:
        {
            /**
             * @opdone
             * @opmaps      grp3_f6
             * @opcode      /5
             * @opflclass   multiply
             */
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
            break;
        }
        case 6:
        {
            /**
             * @opdone
             * @opmaps      grp3_f6
             * @opcode      /6
             * @opflclass   division
             */
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
            break;
        }
        case 7:
        {
            /**
             * @opdone
             * @opmaps      grp3_f6
             * @opcode      /7
             * @opflclass   division
             */
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
            break;
        }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14725
14726
/**
 * TEST Ev,Iv - ANDs the operand with the immediate and sets flags without
 * writing back the result.  The 64-bit form uses a sign-extended 32-bit
 * immediate; memory operands are mapped read-only.
 *
 * @opmaps      grp3_f7
 * @opcode      /0
 * @opflclass   logical
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint16_t const *, pu16Dst, 0);
                IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint16_t,  u16Src, u16Imm,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint32_t const *, pu32Dst, 0);
                IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint32_t,  u32Src, u32Imm,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);

                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_ARG(uint64_t const *, pu64Dst, 0);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint64_t,  u64Src, u64Imm,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14866
14867
/**
 * NOT Ev - ones complement negation of a word/dword/qword operand.
 *
 * @opmaps      grp3_f7
 * @opcode      /2
 * @opflclass   unchanged
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
/** @todo does not modify EFLAGS */
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_not_u16,        iemAImpl_not_u32,        iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
14880
14881
/**
 * NEG Ev - twos complement negation of a word/dword/qword operand.
 *
 * @opmaps      grp3_f7
 * @opcode      /3
 * @opflclass   arithmetic
 */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_neg_u16,        iemAImpl_neg_u32,        iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
14893
14894
/**
 * MUL Ev - unsigned multiply of eAX by the operand, result in eDX:eAX.
 *
 * @opmaps      grp3_f7
 * @opcode      /4
 * @opflclass   multiply
 */
FNIEMOP_DEF_1(iemOp_grp3_mul_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(mul_Ev, "mul Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
}
14906
14907
/**
 * IMUL Ev - signed multiply of eAX by the operand, result in eDX:eAX.
 *
 * @opmaps      grp3_f7
 * @opcode      /5
 * @opflclass   multiply
 */
FNIEMOP_DEF_1(iemOp_grp3_imul_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(imul_Ev, "imul Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
}
14919
14920
/**
 * DIV Ev - unsigned divide of eDX:eAX by the operand; may raise \#DE.
 *
 * @opmaps      grp3_f7
 * @opcode      /6
 * @opflclass   division
 */
FNIEMOP_DEF_1(iemOp_grp3_div_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(div_Ev, "div Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
    IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
}
14932
14933
/**
 * IDIV Ev - signed divide of eDX:eAX by the operand; may raise \#DE.
 *
 * @opmaps      grp3_f7
 * @opcode      /7
 * @opflclass   division
 */
FNIEMOP_DEF_1(iemOp_grp3_idiv_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
    IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
}
14945
14946
/**
 * Group 3 Ev forms: dispatches on the ModR/M reg field.  /0 and /1 both
 * decode as TEST Ev,Iv (/1 treated as an alias of /0).
 *
 * @opcode 0xf7
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_grp3_mul_Ev, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_grp3_imul_Ev, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_grp3_div_Ev, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_grp3_idiv_Ev, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14966
14967
/**
 * CLC - clears the carry flag.
 *
 * @opcode      0xf8
 * @opflmodify  cf
 * @opflclear   cf
 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
14982
14983
14984/**
14985 * @opcode 0xf9
14986 * @opflmodify cf
14987 * @opflset cf
14988 */
14989FNIEMOP_DEF(iemOp_stc)
14990{
14991 IEMOP_MNEMONIC(stc, "stc");
14992 IEM_MC_BEGIN(0, 0, 0, 0);
14993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14994 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
14995 IEM_MC_ADVANCE_RIP_AND_FINISH();
14996 IEM_MC_END();
14997}
14998
14999
15000/**
15001 * @opcode 0xfa
15002 * @opfltest iopl,vm
15003 * @opflmodify if,vif
15004 */
15005FNIEMOP_DEF(iemOp_cli)
15006{
15007 IEMOP_MNEMONIC(cli, "cli");
15008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15009 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, 0, iemCImpl_cli);
15010}
15011
15012
15013/**
15014 * @opcode 0xfb
15015 * @opfltest iopl,vm
15016 * @opflmodify if,vif
15017 */
15018FNIEMOP_DEF(iemOp_sti)
15019{
15020 IEMOP_MNEMONIC(sti, "sti");
15021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15022 IEM_MC_DEFER_TO_CIMPL_0_RET( IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
15023 | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, 0, iemCImpl_sti);
15024}
15025
15026
15027/**
15028 * @opcode 0xfc
15029 * @opflmodify df
15030 * @opflclear df
15031 */
15032FNIEMOP_DEF(iemOp_cld)
15033{
15034 IEMOP_MNEMONIC(cld, "cld");
15035 IEM_MC_BEGIN(0, 0, 0, 0);
15036 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15037 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
15038 IEM_MC_ADVANCE_RIP_AND_FINISH();
15039 IEM_MC_END();
15040}
15041
15042
15043/**
15044 * @opcode 0xfd
15045 * @opflmodify df
15046 * @opflset df
15047 */
15048FNIEMOP_DEF(iemOp_std)
15049{
15050 IEMOP_MNEMONIC(std, "std");
15051 IEM_MC_BEGIN(0, 0, 0, 0);
15052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15053 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
15054 IEM_MC_ADVANCE_RIP_AND_FINISH();
15055 IEM_MC_END();
15056}
15057
15058
15059/**
15060 * @opmaps grp4
15061 * @opcode /0
15062 * @opflclass incdec
15063 */
15064FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
15065{
15066 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
15067 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
15068}
15069
15070
15071/**
15072 * @opmaps grp4
15073 * @opcode /1
15074 * @opflclass incdec
15075 */
15076FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
15077{
15078 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
15079 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
15080}
15081
15082
15083/**
15084 * @opcode 0xfe
15085 */
15086FNIEMOP_DEF(iemOp_Grp4)
15087{
15088 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15089 switch (IEM_GET_MODRM_REG_8(bRm))
15090 {
15091 case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
15092 case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
15093 default:
15094 /** @todo is the eff-addr decoded? */
15095 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
15096 IEMOP_RAISE_INVALID_OPCODE_RET();
15097 }
15098}
15099
15100/**
15101 * @opmaps grp5
15102 * @opcode /0
15103 * @opflclass incdec
15104 */
15105FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
15106{
15107 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
15108 IEMOP_BODY_UNARY_Ev( iemAImpl_inc_u16, iemAImpl_inc_u32, iemAImpl_inc_u64);
15109 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
15110}
15111
15112
15113/**
15114 * @opmaps grp5
15115 * @opcode /1
15116 * @opflclass incdec
15117 */
15118FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
15119{
15120 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
15121 IEMOP_BODY_UNARY_Ev( iemAImpl_dec_u16, iemAImpl_dec_u32, iemAImpl_dec_u64);
15122 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
15123}
15124
15125
15126/**
15127 * Opcode 0xff /2.
15128 * @param bRm The RM byte.
15129 */
15130FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
15131{
15132 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
15133 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
15134
15135 if (IEM_IS_MODRM_REG_MODE(bRm))
15136 {
15137 /* The new RIP is taken from a register. */
15138 switch (pVCpu->iem.s.enmEffOpSize)
15139 {
15140 case IEMMODE_16BIT:
15141 IEM_MC_BEGIN(1, 0, 0, 0);
15142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15143 IEM_MC_ARG(uint16_t, u16Target, 0);
15144 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
15145 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
15146 IEM_MC_END();
15147 break;
15148
15149 case IEMMODE_32BIT:
15150 IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
15151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15152 IEM_MC_ARG(uint32_t, u32Target, 0);
15153 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
15154 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
15155 IEM_MC_END();
15156 break;
15157
15158 case IEMMODE_64BIT:
15159 IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
15160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15161 IEM_MC_ARG(uint64_t, u64Target, 0);
15162 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
15163 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
15164 IEM_MC_END();
15165 break;
15166
15167 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15168 }
15169 }
15170 else
15171 {
15172 /* The new RIP is taken from a register. */
15173 switch (pVCpu->iem.s.enmEffOpSize)
15174 {
15175 case IEMMODE_16BIT:
15176 IEM_MC_BEGIN(1, 1, 0, 0);
15177 IEM_MC_ARG(uint16_t, u16Target, 0);
15178 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15179 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15181 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15182 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
15183 IEM_MC_END();
15184 break;
15185
15186 case IEMMODE_32BIT:
15187 IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
15188 IEM_MC_ARG(uint32_t, u32Target, 0);
15189 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15190 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15192 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15193 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
15194 IEM_MC_END();
15195 break;
15196
15197 case IEMMODE_64BIT:
15198 IEM_MC_BEGIN(1, 1, IEM_MC_F_64BIT, 0);
15199 IEM_MC_ARG(uint64_t, u64Target, 0);
15200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15201 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15203 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15204 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
15205 IEM_MC_END();
15206 break;
15207
15208 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15209 }
15210 }
15211}
15212
/**
 * Common body for grp5 far call (/3) and far jump (/5) with a memory operand
 * (Ep): loads a far pointer (16-bit selector + 16/32/64-bit offset) from
 * memory and defers to @a a_fnCImpl.
 *
 * A register operand raises an invalid opcode exception.  In 64-bit mode the
 * default operand size is 32-bit; only Intel CPUs respect a REX.W prefix here.
 *
 * @param   a_bRm           The ModR/M byte.
 * @param   a_fnCImpl       The C implementation worker to call.
 * @param   a_fCImplExtra   Additional IEM_CIMPL_F_XXX flags for the call.
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl, a_fCImplExtra) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1, 0, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint16_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint32_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint64_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE /* no gates */, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
15281
15282
15283/**
15284 * Opcode 0xff /3.
15285 * @param bRm The RM byte.
15286 */
15287FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
15288{
15289 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
15290 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf, IEM_CIMPL_F_BRANCH_STACK);
15291}
15292
15293
15294/**
15295 * Opcode 0xff /4.
15296 * @param bRm The RM byte.
15297 */
15298FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
15299{
15300 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
15301 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
15302
15303 if (IEM_IS_MODRM_REG_MODE(bRm))
15304 {
15305 /* The new RIP is taken from a register. */
15306 switch (pVCpu->iem.s.enmEffOpSize)
15307 {
15308 case IEMMODE_16BIT:
15309 IEM_MC_BEGIN(0, 1, 0, 0);
15310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15311 IEM_MC_LOCAL(uint16_t, u16Target);
15312 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
15313 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
15314 IEM_MC_END();
15315 break;
15316
15317 case IEMMODE_32BIT:
15318 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
15319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15320 IEM_MC_LOCAL(uint32_t, u32Target);
15321 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
15322 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
15323 IEM_MC_END();
15324 break;
15325
15326 case IEMMODE_64BIT:
15327 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
15328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15329 IEM_MC_LOCAL(uint64_t, u64Target);
15330 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
15331 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
15332 IEM_MC_END();
15333 break;
15334
15335 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15336 }
15337 }
15338 else
15339 {
15340 /* The new RIP is taken from a memory location. */
15341 switch (pVCpu->iem.s.enmEffOpSize)
15342 {
15343 case IEMMODE_16BIT:
15344 IEM_MC_BEGIN(0, 2, 0, 0);
15345 IEM_MC_LOCAL(uint16_t, u16Target);
15346 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15349 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15350 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
15351 IEM_MC_END();
15352 break;
15353
15354 case IEMMODE_32BIT:
15355 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
15356 IEM_MC_LOCAL(uint32_t, u32Target);
15357 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15358 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15359 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15360 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15361 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
15362 IEM_MC_END();
15363 break;
15364
15365 case IEMMODE_64BIT:
15366 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
15367 IEM_MC_LOCAL(uint64_t, u64Target);
15368 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15369 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15371 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15372 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
15373 IEM_MC_END();
15374 break;
15375
15376 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15377 }
15378 }
15379}
15380
15381
15382/**
15383 * Opcode 0xff /5.
15384 * @param bRm The RM byte.
15385 */
15386FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
15387{
15388 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
15389 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp, 0);
15390}
15391
15392
15393/**
15394 * Opcode 0xff /6.
15395 * @param bRm The RM byte.
15396 */
15397FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
15398{
15399 IEMOP_MNEMONIC(push_Ev, "push Ev");
15400
15401 /* Registers are handled by a common worker. */
15402 if (IEM_IS_MODRM_REG_MODE(bRm))
15403 return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
15404
15405 /* Memory we do here. */
15406 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
15407 switch (pVCpu->iem.s.enmEffOpSize)
15408 {
15409 case IEMMODE_16BIT:
15410 IEM_MC_BEGIN(0, 2, 0, 0);
15411 IEM_MC_LOCAL(uint16_t, u16Src);
15412 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15413 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15415 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15416 IEM_MC_PUSH_U16(u16Src);
15417 IEM_MC_ADVANCE_RIP_AND_FINISH();
15418 IEM_MC_END();
15419 break;
15420
15421 case IEMMODE_32BIT:
15422 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
15423 IEM_MC_LOCAL(uint32_t, u32Src);
15424 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15427 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15428 IEM_MC_PUSH_U32(u32Src);
15429 IEM_MC_ADVANCE_RIP_AND_FINISH();
15430 IEM_MC_END();
15431 break;
15432
15433 case IEMMODE_64BIT:
15434 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
15435 IEM_MC_LOCAL(uint64_t, u64Src);
15436 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15437 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15439 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15440 IEM_MC_PUSH_U64(u64Src);
15441 IEM_MC_ADVANCE_RIP_AND_FINISH();
15442 IEM_MC_END();
15443 break;
15444
15445 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15446 }
15447}
15448
15449
15450/**
15451 * @opcode 0xff
15452 */
15453FNIEMOP_DEF(iemOp_Grp5)
15454{
15455 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15456 switch (IEM_GET_MODRM_REG_8(bRm))
15457 {
15458 case 0:
15459 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
15460 case 1:
15461 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
15462 case 2:
15463 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
15464 case 3:
15465 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
15466 case 4:
15467 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
15468 case 5:
15469 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
15470 case 6:
15471 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
15472 case 7:
15473 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
15474 IEMOP_RAISE_INVALID_OPCODE_RET();
15475 }
15476 AssertFailedReturn(VERR_IEM_IPE_3);
15477}
15478
15479
15480
/**
 * The one-byte opcode decoder function table, indexed by the opcode byte
 * (0x00 thru 0xff), four entries per row.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_in_eAX_DX,        iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
15548
15549
15550/** @} */
15551
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette