VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 103235

Last change on this file since 103235 was 103235, checked in by vboxsync, 14 months ago

VMM/IEM: Liveness analysis, part 7b: Release build fix for visual C++. bugref:10372

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 552.9 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 103235 2024-02-07 01:38:33Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
59/**
60 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
61 * memory/register as the destination.
 *
 * Register-form encodings take the no-memory fast path.  Memory forms keep
 * EFLAGS in a local (see the note in the body) and only commit them after
 * the memory commit.  A LOCK prefix — unless IEM_F_X86_DISREGARD_LOCK is
 * set — selects the atomic mapping and the a_fnLockedU8 worker.
62 */
63#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8, a_fnLockedU8) \
64 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
65 \
66 /* \
67 * If rm is denoting a register, no more instruction bytes. \
68 */ \
69 if (IEM_IS_MODRM_REG_MODE(bRm)) \
70 { \
71 IEM_MC_BEGIN(3, 0, 0, 0); \
72 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
73 IEM_MC_ARG(uint8_t, u8Src, 1); \
74 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
75 \
76 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
77 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
78 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
79 IEM_MC_REF_EFLAGS(pEFlags); \
80 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
81 \
82 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
83 IEM_MC_END(); \
84 } \
85 else \
86 { \
87 /* \
88 * We're accessing memory. \
89 * Note! We're putting the eflags on the stack here so we can commit them \
90 * after the memory. \
91 */ \
92 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
93 { \
94 IEM_MC_BEGIN(3, 3, 0, 0); \
95 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
96 IEM_MC_ARG(uint8_t, u8Src, 1); \
97 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
98 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
99 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
100 \
101 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
102 IEMOP_HLP_DONE_DECODING(); \
103 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
104 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
105 IEM_MC_FETCH_EFLAGS(EFlags); \
106 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
107 \
108 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
109 IEM_MC_COMMIT_EFLAGS(EFlags); \
110 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
111 IEM_MC_END(); \
112 } \
113 else \
114 { \
115 IEM_MC_BEGIN(3, 3, 0, 0); \
116 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
117 IEM_MC_ARG(uint8_t, u8Src, 1); \
118 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
119 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
120 IEM_MC_LOCAL(uint8_t, bMapInfoDst); \
121 \
122 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
123 IEMOP_HLP_DONE_DECODING(); \
124 IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bMapInfoDst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
125 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
126 IEM_MC_FETCH_EFLAGS(EFlags); \
127 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
128 \
129 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bMapInfoDst); \
130 IEM_MC_COMMIT_EFLAGS(EFlags); \
131 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
132 IEM_MC_END(); \
133 } \
134 } \
135 (void)0
136
137/**
138 * Body for instructions like TEST & CMP, ++ with a byte memory/registers as
139 * operands.
 *
 * Read-only destination variant: the memory operand is mapped RO, and a
 * LOCK prefix on the memory form raises the invalid-lock-prefix exception
 * instead of taking an atomic path.
140 */
141#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8) \
142 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
143 \
144 /* \
145 * If rm is denoting a register, no more instruction bytes. \
146 */ \
147 if (IEM_IS_MODRM_REG_MODE(bRm)) \
148 { \
149 IEM_MC_BEGIN(3, 0, 0, 0); \
150 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
151 IEM_MC_ARG(uint8_t, u8Src, 1); \
152 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
153 \
154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
155 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
156 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
157 IEM_MC_REF_EFLAGS(pEFlags); \
158 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
159 \
160 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
161 IEM_MC_END(); \
162 } \
163 else \
164 { \
165 /* \
166 * We're accessing memory. \
167 * Note! We're putting the eflags on the stack here so we can commit them \
168 * after the memory. \
169 */ \
170 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
171 { \
172 IEM_MC_BEGIN(3, 3, 0, 0); \
173 IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
174 IEM_MC_ARG(uint8_t, u8Src, 1); \
175 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
177 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
178 \
179 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
180 IEMOP_HLP_DONE_DECODING(); \
181 IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
182 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
183 IEM_MC_FETCH_EFLAGS(EFlags); \
184 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
185 \
186 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
187 IEM_MC_COMMIT_EFLAGS(EFlags); \
188 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
189 IEM_MC_END(); \
190 } \
191 else \
192 { \
193 IEMOP_HLP_DONE_DECODING(); \
194 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
195 } \
196 } \
197 (void)0
198
199/**
200 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
201 * destination.
 *
 * The destination is always a register, so there is no LOCK form; a memory
 * operand, if present, is only fetched (read) as the source.
202 */
203#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
204 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
205 \
206 /* \
207 * If rm is denoting a register, no more instruction bytes. \
208 */ \
209 if (IEM_IS_MODRM_REG_MODE(bRm)) \
210 { \
211 IEM_MC_BEGIN(3, 0, 0, 0); \
212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
213 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
214 IEM_MC_ARG(uint8_t, u8Src, 1); \
215 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
216 \
217 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
218 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
219 IEM_MC_REF_EFLAGS(pEFlags); \
220 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
221 \
222 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
223 IEM_MC_END(); \
224 } \
225 else \
226 { \
227 /* \
228 * We're accessing memory. \
229 */ \
230 IEM_MC_BEGIN(3, 1, 0, 0); \
231 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
232 IEM_MC_ARG(uint8_t, u8Src, 1); \
233 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
234 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
235 \
236 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
238 IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
239 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
240 IEM_MC_REF_EFLAGS(pEFlags); \
241 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
242 \
243 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
244 IEM_MC_END(); \
245 } \
246 (void)0
247
248
249/**
250 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
251 * memory/register as the destination.
 *
 * Note! This macro deliberately ends inside the locked-memory 'else'
 * branch with unbalanced braces: the invocation must be followed
 * immediately by IEMOP_BODY_BINARY_rm_rv_LOCKED, which supplies the
 * LOCK-prefix body and closes the open blocks.
252 */
253#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
254 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
255 \
256 /* \
257 * If rm is denoting a register, no more instruction bytes. \
258 */ \
259 if (IEM_IS_MODRM_REG_MODE(bRm)) \
260 { \
261 switch (pVCpu->iem.s.enmEffOpSize) \
262 { \
263 case IEMMODE_16BIT: \
264 IEM_MC_BEGIN(3, 0, 0, 0); \
265 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
266 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
267 IEM_MC_ARG(uint16_t, u16Src, 1); \
268 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
269 \
270 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
271 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
272 IEM_MC_REF_EFLAGS(pEFlags); \
273 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
274 \
275 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
276 IEM_MC_END(); \
277 break; \
278 \
279 case IEMMODE_32BIT: \
280 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
282 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
283 IEM_MC_ARG(uint32_t, u32Src, 1); \
284 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
285 \
286 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
287 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
288 IEM_MC_REF_EFLAGS(pEFlags); \
289 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
290 \
291 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
292 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
293 IEM_MC_END(); \
294 break; \
295 \
296 case IEMMODE_64BIT: \
297 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
298 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
299 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
300 IEM_MC_ARG(uint64_t, u64Src, 1); \
301 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
302 \
303 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
304 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
305 IEM_MC_REF_EFLAGS(pEFlags); \
306 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
307 \
308 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
309 IEM_MC_END(); \
310 break; \
311 \
312 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
313 } \
314 } \
315 else \
316 { \
317 /* \
318 * We're accessing memory. \
319 * Note! We're putting the eflags on the stack here so we can commit them \
320 * after the memory. \
321 */ \
322 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
323 { \
324 switch (pVCpu->iem.s.enmEffOpSize) \
325 { \
326 case IEMMODE_16BIT: \
327 IEM_MC_BEGIN(3, 3, 0, 0); \
328 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
329 IEM_MC_ARG(uint16_t, u16Src, 1); \
330 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
331 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
332 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
333 \
334 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
335 IEMOP_HLP_DONE_DECODING(); \
336 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
337 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
338 IEM_MC_FETCH_EFLAGS(EFlags); \
339 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
340 \
341 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
342 IEM_MC_COMMIT_EFLAGS(EFlags); \
343 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
344 IEM_MC_END(); \
345 break; \
346 \
347 case IEMMODE_32BIT: \
348 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
349 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
350 IEM_MC_ARG(uint32_t, u32Src, 1); \
351 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
352 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
353 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
354 \
355 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
356 IEMOP_HLP_DONE_DECODING(); \
357 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
358 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
359 IEM_MC_FETCH_EFLAGS(EFlags); \
360 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
361 \
362 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
363 IEM_MC_COMMIT_EFLAGS(EFlags); \
364 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
365 IEM_MC_END(); \
366 break; \
367 \
368 case IEMMODE_64BIT: \
369 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
370 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
371 IEM_MC_ARG(uint64_t, u64Src, 1); \
372 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
373 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
374 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
375 \
376 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
377 IEMOP_HLP_DONE_DECODING(); \
378 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
379 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
380 IEM_MC_FETCH_EFLAGS(EFlags); \
381 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
382 \
383 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
384 IEM_MC_COMMIT_EFLAGS(EFlags); \
385 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
386 IEM_MC_END(); \
387 break; \
388 \
389 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
390 } \
391 } \
392 else \
393 { \
394 (void)0
395/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/** LOCK-prefix continuation of IEMOP_BODY_BINARY_rm_rv_RW: supplies the
 * atomic memory path and closes the blocks the RW macro left open. */
396#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
397 switch (pVCpu->iem.s.enmEffOpSize) \
398 { \
399 case IEMMODE_16BIT: \
400 IEM_MC_BEGIN(3, 3, 0, 0); \
401 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
402 IEM_MC_ARG(uint16_t, u16Src, 1); \
403 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
404 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
405 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
406 \
407 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
408 IEMOP_HLP_DONE_DECODING(); \
409 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
410 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
411 IEM_MC_FETCH_EFLAGS(EFlags); \
412 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
413 \
414 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
415 IEM_MC_COMMIT_EFLAGS(EFlags); \
416 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
417 IEM_MC_END(); \
418 break; \
419 \
420 case IEMMODE_32BIT: \
421 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
422 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
423 IEM_MC_ARG(uint32_t, u32Src, 1); \
424 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
425 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
426 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
427 \
428 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
429 IEMOP_HLP_DONE_DECODING(); \
430 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
431 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
432 IEM_MC_FETCH_EFLAGS(EFlags); \
433 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
434 \
435 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
436 IEM_MC_COMMIT_EFLAGS(EFlags); \
437 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
438 IEM_MC_END(); \
439 break; \
440 \
441 case IEMMODE_64BIT: \
442 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
443 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
444 IEM_MC_ARG(uint64_t, u64Src, 1); \
445 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
447 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
448 \
449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
450 IEMOP_HLP_DONE_DECODING(); \
451 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
452 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
453 IEM_MC_FETCH_EFLAGS(EFlags); \
454 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
455 \
456 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
457 IEM_MC_COMMIT_EFLAGS(EFlags); \
458 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
459 IEM_MC_END(); \
460 break; \
461 \
462 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
463 } \
464 } \
465 } \
466 (void)0
467
468/**
469 * Body for read-only word/dword/qword instructions like TEST and CMP with
470 * memory/register as the destination.
 *
 * The memory operand is mapped RO and a LOCK prefix raises the
 * invalid-lock-prefix exception; unlike the _RW variant this macro is
 * self-contained (no _LOCKED continuation needed).
471 */
472#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
473 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
474 \
475 /* \
476 * If rm is denoting a register, no more instruction bytes. \
477 */ \
478 if (IEM_IS_MODRM_REG_MODE(bRm)) \
479 { \
480 switch (pVCpu->iem.s.enmEffOpSize) \
481 { \
482 case IEMMODE_16BIT: \
483 IEM_MC_BEGIN(3, 0, 0, 0); \
484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
485 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
486 IEM_MC_ARG(uint16_t, u16Src, 1); \
487 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
488 \
489 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
490 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
491 IEM_MC_REF_EFLAGS(pEFlags); \
492 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
493 \
494 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
495 IEM_MC_END(); \
496 break; \
497 \
498 case IEMMODE_32BIT: \
499 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
501 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
502 IEM_MC_ARG(uint32_t, u32Src, 1); \
503 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
504 \
505 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
506 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
507 IEM_MC_REF_EFLAGS(pEFlags); \
508 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
509 \
510 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
511 IEM_MC_END(); \
512 break; \
513 \
514 case IEMMODE_64BIT: \
515 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
517 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
518 IEM_MC_ARG(uint64_t, u64Src, 1); \
519 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
520 \
521 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
522 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
523 IEM_MC_REF_EFLAGS(pEFlags); \
524 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
525 \
526 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
527 IEM_MC_END(); \
528 break; \
529 \
530 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
531 } \
532 } \
533 else \
534 { \
535 /* \
536 * We're accessing memory. \
537 * Note! We're putting the eflags on the stack here so we can commit them \
538 * after the memory. \
539 */ \
540 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
541 { \
542 switch (pVCpu->iem.s.enmEffOpSize) \
543 { \
544 case IEMMODE_16BIT: \
545 IEM_MC_BEGIN(3, 3, 0, 0); \
546 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
547 IEM_MC_ARG(uint16_t, u16Src, 1); \
548 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
549 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
550 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
551 \
552 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
553 IEMOP_HLP_DONE_DECODING(); \
554 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
555 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
556 IEM_MC_FETCH_EFLAGS(EFlags); \
557 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
558 \
559 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
560 IEM_MC_COMMIT_EFLAGS(EFlags); \
561 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
562 IEM_MC_END(); \
563 break; \
564 \
565 case IEMMODE_32BIT: \
566 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
567 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
568 IEM_MC_ARG(uint32_t, u32Src, 1); \
569 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
570 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
571 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
572 \
573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
574 IEMOP_HLP_DONE_DECODING(); \
575 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
576 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
577 IEM_MC_FETCH_EFLAGS(EFlags); \
578 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
579 \
580 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
581 IEM_MC_COMMIT_EFLAGS(EFlags); \
582 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
583 IEM_MC_END(); \
584 break; \
585 \
586 case IEMMODE_64BIT: \
587 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
588 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
589 IEM_MC_ARG(uint64_t, u64Src, 1); \
590 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
591 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
592 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
593 \
594 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
595 IEMOP_HLP_DONE_DECODING(); \
596 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
597 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
598 IEM_MC_FETCH_EFLAGS(EFlags); \
599 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
600 \
601 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
602 IEM_MC_COMMIT_EFLAGS(EFlags); \
603 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
604 IEM_MC_END(); \
605 break; \
606 \
607 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
608 } \
609 } \
610 else \
611 { \
612 IEMOP_HLP_DONE_DECODING(); \
613 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
614 } \
615 } \
616 (void)0
617
618
619/**
620 * Body for instructions like ADD, AND, OR, ++ with working on AL with
621 * a byte immediate.
 *
 * Fixed AL destination with an 8-bit immediate source; no ModR/M byte and
 * no LOCK form (register destination).
622 */
623#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
624 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
625 \
626 IEM_MC_BEGIN(3, 0, 0, 0); \
627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
628 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
629 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1); \
630 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
631 \
632 IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
633 IEM_MC_REF_EFLAGS(pEFlags); \
634 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
635 \
636 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
637 IEM_MC_END()
638
639/**
640 * Body for instructions like ADD, AND, OR, ++ with working on
641 * AX/EAX/RAX with a word/dword immediate.
 *
 * In 64-bit mode the immediate is a sign-extended 32-bit value
 * (IEM_OPCODE_GET_NEXT_S32_SX_U64).  a_fModifiesDstReg controls whether the
 * 32-bit case clears the high half of RAX — pass 0 for non-writing
 * instructions like CMP/TEST.
642 */
643#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
644 switch (pVCpu->iem.s.enmEffOpSize) \
645 { \
646 case IEMMODE_16BIT: \
647 { \
648 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
649 \
650 IEM_MC_BEGIN(3, 0, 0, 0); \
651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
652 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
653 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1); \
654 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
655 \
656 IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
657 IEM_MC_REF_EFLAGS(pEFlags); \
658 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
659 \
660 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
661 IEM_MC_END(); \
662 } \
663 \
664 case IEMMODE_32BIT: \
665 { \
666 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
667 \
668 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
670 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
671 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1); \
672 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
673 \
674 IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
675 IEM_MC_REF_EFLAGS(pEFlags); \
676 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
677 \
678 if (a_fModifiesDstReg) \
679 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
680 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
681 IEM_MC_END(); \
682 } \
683 \
684 case IEMMODE_64BIT: \
685 { \
686 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
687 \
688 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
689 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
690 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
691 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1); \
692 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
693 \
694 IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
695 IEM_MC_REF_EFLAGS(pEFlags); \
696 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
697 \
698 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
699 IEM_MC_END(); \
700 } \
701 \
702 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
703 } \
704 (void)0
705
706
707
708/* Instruction specification format - work in progress: */
709
710/**
711 * @opcode 0x00
712 * @opmnemonic add
713 * @op1 rm:Eb
714 * @op2 reg:Gb
715 * @opmaps one
716 * @openc ModR/M
717 * @opflclass arithmetic
718 * @ophints harmless ignores_op_sizes
719 * @opstats add_Eb_Gb
720 * @opgroup og_gen_arith_bin
721 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
722 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
723 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
724 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
725 */
726FNIEMOP_DEF(iemOp_add_Eb_Gb)
727{
728 IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
 /* Byte r/m destination; a LOCK prefix routes to the _locked worker. */
729 IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_add_u8, iemAImpl_add_u8_locked);
730}
731
732
733/**
734 * @opcode 0x01
735 * @opgroup og_gen_arith_bin
736 * @opflclass arithmetic
737 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
738 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
739 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
740 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
741 */
742FNIEMOP_DEF(iemOp_add_Ev_Gv)
743{
744 IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
 /* The _RW and _LOCKED bodies together form one statement (the first ends
 inside an open 'else' branch which the second closes). */
745 IEMOP_BODY_BINARY_rm_rv_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
746 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
747}
748
749
750/**
751 * @opcode 0x02
752 * @opgroup og_gen_arith_bin
753 * @opflclass arithmetic
754 * @opcopytests iemOp_add_Eb_Gb
755 */
756FNIEMOP_DEF(iemOp_add_Gb_Eb)
757{
758 IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
 /* Register destination, so no LOCK variant. */
759 IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
760}
761
762
763/**
764 * @opcode 0x03
765 * @opgroup og_gen_arith_bin
766 * @opflclass arithmetic
767 * @opcopytests iemOp_add_Ev_Gv
768 */
769FNIEMOP_DEF(iemOp_add_Gv_Ev)
770{
771 IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
 /* IEMOP_BODY_BINARY_rv_rm is defined elsewhere in this file. */
772 IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1, 0);
773}
774
775
776/**
777 * @opcode 0x04
778 * @opgroup og_gen_arith_bin
779 * @opflclass arithmetic
780 * @opcopytests iemOp_add_Eb_Gb
781 */
782FNIEMOP_DEF(iemOp_add_Al_Ib)
783{
784 IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
 /* Fixed AL destination with byte immediate. */
785 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
786}
787
788
789/**
790 * @opcode 0x05
791 * @opgroup og_gen_arith_bin
792 * @opflclass arithmetic
793 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
794 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
795 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
796 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
797 */
798FNIEMOP_DEF(iemOp_add_eAX_Iz)
799{
800 IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
 /* Final '1': ADD writes the destination, so clear RAX's high half in 32-bit mode. */
801 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
802}
803
804
805/**
806 * @opcode 0x06
807 * @opgroup og_stack_sreg
 * PUSH ES — invalid in 64-bit mode (IEMOP_HLP_NO_64BIT).
808 */
809FNIEMOP_DEF(iemOp_push_ES)
810{
811 IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
812 IEMOP_HLP_NO_64BIT();
813 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
814}
815
816
817/**
818 * @opcode 0x07
819 * @opgroup og_stack_sreg
 * POP ES — invalid in 64-bit mode; deferred to a C implementation since
 * loading a segment register can change the execution mode.  The register
 * mask lists everything the CIMPL may modify (xSP plus the ES selector,
 * base, limit and attributes) for the liveness analysis.
820 */
821FNIEMOP_DEF(iemOp_pop_ES)
822{
823 IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
824 IEMOP_HLP_NO_64BIT();
825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
826 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
827 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
828 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
829 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
830 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
831 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES),
832 iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
833}
834
835
836/**
837 * @opcode 0x08
838 * @opgroup og_gen_arith_bin
839 * @opflclass logical
840 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
841 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
842 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
843 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
844 */
845FNIEMOP_DEF(iemOp_or_Eb_Gb)
846{
847 IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
 /* AF is architecturally undefined for OR. */
848 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
849 IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_or_u8, iemAImpl_or_u8_locked);
850}
851
852
853/**
854 * @opcode 0x09
855 * @opgroup og_gen_arith_bin
856 * @opflclass logical
857 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
858 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
859 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
860 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
861 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
862 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
863 * @note AF is documented as undefined, but both modern AMD and Intel CPUs clears it.
864 */
865FNIEMOP_DEF(iemOp_or_Ev_Gv)
866{
867 IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
868 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
869 IEMOP_BODY_BINARY_rm_rv_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
870 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
871}
872
873
874/**
875 * @opcode 0x0a
876 * @opgroup og_gen_arith_bin
877 * @opflclass logical
878 * @opcopytests iemOp_or_Eb_Gb
879 */
880FNIEMOP_DEF(iemOp_or_Gb_Eb)
881{
882 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
 /* AF is architecturally undefined for OR. */
883 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
884 IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
885}
886
887
888/**
889 * @opcode 0x0b
890 * @opgroup og_gen_arith_bin
891 * @opflclass logical
892 * @opcopytests iemOp_or_Ev_Gv
893 */
894FNIEMOP_DEF(iemOp_or_Gv_Ev)
895{
896 IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
897 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
 /* IEMOP_BODY_BINARY_rv_rm is defined elsewhere in this file. */
898 IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1, 0);
899}
900
901
902/**
903 * @opcode 0x0c
904 * @opgroup og_gen_arith_bin
905 * @opflclass logical
906 * @opcopytests iemOp_or_Eb_Gb
907 */
908FNIEMOP_DEF(iemOp_or_Al_Ib)
909{
910 IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
 /* AF is architecturally undefined for OR. */
911 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
912 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
913}
914
915
916/**
917 * @opcode 0x0d
918 * @opgroup og_gen_arith_bin
919 * @opflclass logical
920 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
921 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
922 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
923 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
924 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
925 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
926 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
927 */
928FNIEMOP_DEF(iemOp_or_eAX_Iz)
929{
930 IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
931 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
 /* Final '1': OR writes the destination, so clear RAX's high half in 32-bit mode. */
932 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
933}
934
935
936/**
937 * @opcode 0x0e
938 * @opgroup og_stack_sreg
939 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS - invalid in 64-bit mode (raises #UD there). */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
946
947
948/**
949 * @opcode 0x0f
950 * @opmnemonic EscTwo0f
951 * @openc two0f
952 * @opdisenum OP_2B_ESC
953 * @ophints harmless
954 * @opgroup og_escapes
955 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        /* Fetch the second opcode byte and dispatch thru the two-byte table.
           The table has four entries per opcode, selected by idxPrefix
           (see the #if 0 sanity asserts above for the layout). */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_CS),
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
993
994/**
995 * @opcode 0x10
996 * @opgroup og_gen_arith_bin
997 * @opflclass arithmetic_carry
998 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
999 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1000 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1001 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1002 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1003 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC r/m8, r8 - memory destination form, so LOCK is allowed (hence the
       locked worker alongside the normal one). */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_adc_u8, iemAImpl_adc_u8_locked);
}
1009
1010
1011/**
1012 * @opcode 0x11
1013 * @opgroup og_gen_arith_bin
1014 * @opflclass arithmetic_carry
1015 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1016 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1017 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1018 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1019 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1020 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC r/m16/32/64, r16/32/64 - memory destination; the second body macro
       provides the locked workers used when a LOCK prefix is present. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1027
1028
1029/**
1030 * @opcode 0x12
1031 * @opgroup og_gen_arith_bin
1032 * @opflclass arithmetic_carry
1033 * @opcopytests iemOp_adc_Eb_Gb
1034 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC r8, r/m8 - register destination, no LOCK. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
1040
1041
1042/**
1043 * @opcode 0x13
1044 * @opgroup og_gen_arith_bin
1045 * @opflclass arithmetic_carry
1046 * @opcopytests iemOp_adc_Ev_Gv
1047 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC r16/32/64, r/m16/32/64 - register destination, no LOCK. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1, 0);
}
1053
1054
1055/**
1056 * @opcode 0x14
1057 * @opgroup og_gen_arith_bin
1058 * @opflclass arithmetic_carry
1059 * @opcopytests iemOp_adc_Eb_Gb
1060 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
1066
1067
1068/**
1069 * @opcode 0x15
1070 * @opgroup og_gen_arith_bin
1071 * @opflclass arithmetic_carry
1072 * @opcopytests iemOp_adc_Ev_Gv
1073 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX, imm16/32 - worker picked by effective operand size. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1079
1080
1081/**
1082 * @opcode 0x16
1083 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1090
1091
1092/**
1093 * @opcode 0x17
1094 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS - invalid in 64-bit mode.  Flagged with INHIBIT_IRQS /
       IEM_CIMPL_F_INHIBIT_SHADOW since pop ss delays interrupt delivery
       for the following instruction. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    /* Defer to the C implementation; the mask lists the guest registers the
       call may modify (rSP plus the whole hidden SS register) for the
       recompiler's liveness analysis. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_SS),
                                iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1108
1109
1110/**
1111 * @opcode 0x18
1112 * @opgroup og_gen_arith_bin
1113 * @opflclass arithmetic_carry
1114 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB r/m8, r8 - memory destination form, LOCK allowed. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_sbb_u8, iemAImpl_sbb_u8_locked);
}
1120
1121
1122/**
1123 * @opcode 0x19
1124 * @opgroup og_gen_arith_bin
1125 * @opflclass arithmetic_carry
1126 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB r/m16/32/64, r16/32/64 - memory destination; locked workers for
       the LOCK-prefixed path. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1133
1134
1135/**
1136 * @opcode 0x1a
1137 * @opgroup og_gen_arith_bin
1138 * @opflclass arithmetic_carry
1139 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB r8, r/m8 - register destination, no LOCK. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
1145
1146
1147/**
1148 * @opcode 0x1b
1149 * @opgroup og_gen_arith_bin
1150 * @opflclass arithmetic_carry
1151 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB r16/32/64, r/m16/32/64 - register destination, no LOCK. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1, 0);
}
1157
1158
1159/**
1160 * @opcode 0x1c
1161 * @opgroup og_gen_arith_bin
1162 * @opflclass arithmetic_carry
1163 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
1169
1170
1171/**
1172 * @opcode 0x1d
1173 * @opgroup og_gen_arith_bin
1174 * @opflclass arithmetic_carry
1175 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX, imm16/32 - worker picked by effective operand size. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1181
1182
1183/**
1184 * @opcode 0x1e
1185 * @opgroup og_stack_sreg
1186 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1193
1194
1195/**
1196 * @opcode 0x1f
1197 * @opgroup og_stack_sreg
1198 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    /* Defer to the C implementation; the mask lists the guest registers the
       call may modify (rSP plus the whole hidden DS register) for the
       recompiler's liveness analysis. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS),
                                iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1212
1213
1214/**
1215 * @opcode 0x20
1216 * @opgroup og_gen_arith_bin
1217 * @opflclass logical
1218 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND r/m8, r8 - memory destination form, LOCK allowed. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND. */
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_and_u8, iemAImpl_and_u8_locked);
}
1225
1226
1227/**
1228 * @opcode 0x21
1229 * @opgroup og_gen_arith_bin
1230 * @opflclass logical
1231 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND r/m16/32/64, r16/32/64 - memory destination; locked workers for
       the LOCK-prefixed path. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1239
1240
1241/**
1242 * @opcode 0x22
1243 * @opgroup og_gen_arith_bin
1244 * @opflclass logical
1245 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND r8, r/m8 - register destination, no LOCK. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1252
1253
1254/**
1255 * @opcode 0x23
1256 * @opgroup og_gen_arith_bin
1257 * @opflclass logical
1258 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND r16/32/64, r/m16/32/64 - register destination, no LOCK. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1, 0);
}
1265
1266
1267/**
1268 * @opcode 0x24
1269 * @opgroup og_gen_arith_bin
1270 * @opflclass logical
1271 */
1272FNIEMOP_DEF(iemOp_and_Al_Ib)
1273{
1274 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1275 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1276 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1277}
1278
1279
1280/**
1281 * @opcode 0x25
1282 * @opgroup og_gen_arith_bin
1283 * @opflclass logical
1284 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX, imm16/32 - worker picked by effective operand size. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1291
1292
1293/**
1294 * @opcode 0x26
1295 * @opmnemonic SEG
1296 * @op1 ES
1297 * @opgroup og_prefix
1298 * @openc prefix
1299 * @opdisenum OP_SEG
1300 * @ophints harmless
1301 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record it, make ES the effective segment,
       then continue decoding with the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg    = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1311
1312
1313/**
1314 * @opcode 0x27
1315 * @opfltest af,cf
1316 * @opflmodify cf,pf,af,zf,sf,of
1317 * @opflundef of
1318 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA - decimal adjust AL after addition; invalid in 64-bit mode.
       Deferred to a C implementation which only touches rAX and status flags. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after DAA. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_daa);
}
1327
1328
1329/**
1330 * @opcode 0x28
1331 * @opgroup og_gen_arith_bin
1332 * @opflclass arithmetic
1333 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB r/m8, r8 - memory destination form, LOCK allowed. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_sub_u8, iemAImpl_sub_u8_locked);
}
1339
1340
1341/**
1342 * @opcode 0x29
1343 * @opgroup og_gen_arith_bin
1344 * @opflclass arithmetic
1345 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB r/m16/32/64, r16/32/64 - memory destination; locked workers for
       the LOCK-prefixed path. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1352
1353
1354/**
1355 * @opcode 0x2a
1356 * @opgroup og_gen_arith_bin
1357 * @opflclass arithmetic
1358 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB r8, r/m8 - register destination, no LOCK. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1364
1365
1366/**
1367 * @opcode 0x2b
1368 * @opgroup og_gen_arith_bin
1369 * @opflclass arithmetic
1370 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB r16/32/64, r/m16/32/64 - register destination, no LOCK. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1, 0);
}
1376
1377
1378/**
1379 * @opcode 0x2c
1380 * @opgroup og_gen_arith_bin
1381 * @opflclass arithmetic
1382 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1388
1389
1390/**
1391 * @opcode 0x2d
1392 * @opgroup og_gen_arith_bin
1393 * @opflclass arithmetic
1394 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX, imm16/32 - worker picked by effective operand size. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1400
1401
1402/**
1403 * @opcode 0x2e
1404 * @opmnemonic SEG
1405 * @op1 CS
1406 * @opgroup og_prefix
1407 * @openc prefix
1408 * @opdisenum OP_SEG
1409 * @ophints harmless
1410 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record it, make CS the effective segment,
       then continue decoding with the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1420
1421
1422/**
1423 * @opcode 0x2f
1424 * @opfltest af,cf
1425 * @opflmodify cf,pf,af,zf,sf,of
1426 * @opflundef of
1427 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS - decimal adjust AL after subtraction; invalid in 64-bit mode.
       Deferred to a C implementation which only touches rAX and status flags. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after DAS. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_das);
}
1436
1437
1438/**
1439 * @opcode 0x30
1440 * @opgroup og_gen_arith_bin
1441 * @opflclass logical
1442 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR r/m8, r8 - memory destination form, LOCK allowed. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR. */
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_xor_u8, iemAImpl_xor_u8_locked);
}
1449
1450
1451/**
1452 * @opcode 0x31
1453 * @opgroup og_gen_arith_bin
1454 * @opflclass logical
1455 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR r/m16/32/64, r16/32/64 - memory destination; locked workers for
       the LOCK-prefixed path. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1463
1464
1465/**
1466 * @opcode 0x32
1467 * @opgroup og_gen_arith_bin
1468 * @opflclass logical
1469 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR r8, r/m8 - register destination, no LOCK. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1476
1477
1478/**
1479 * @opcode 0x33
1480 * @opgroup og_gen_arith_bin
1481 * @opflclass logical
1482 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR r16/32/64, r/m16/32/64 - register destination, no LOCK. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1, 0);
}
1489
1490
1491/**
1492 * @opcode 0x34
1493 * @opgroup og_gen_arith_bin
1494 * @opflclass logical
1495 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1502
1503
1504/**
1505 * @opcode 0x35
1506 * @opgroup og_gen_arith_bin
1507 * @opflclass logical
1508 */
1509FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1510{
1511 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1512 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1513 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1514}
1515
1516
1517/**
1518 * @opcode 0x36
1519 * @opmnemonic SEG
1520 * @op1 SS
1521 * @opgroup og_prefix
1522 * @openc prefix
1523 * @opdisenum OP_SEG
1524 * @ophints harmless
1525 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record it, make SS the effective segment,
       then continue decoding with the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1535
1536
1537/**
1538 * @opcode 0x37
1539 * @opfltest af
1540 * @opflmodify cf,pf,af,zf,sf,of
1541 * @opflundef pf,zf,sf,of
1542 * @opgroup og_gen_arith_dec
1543 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1544 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1545 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1546 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1547 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1548 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1549 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1550 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1551 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1552 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1553 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1554 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1555 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1556 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1557 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1558 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1559 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1560 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1561 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1562 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1563 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1564 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1565 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1566 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1567 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1568 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1569 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1570 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1571 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1572 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1573 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1574 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA - ASCII adjust AL after addition; invalid in 64-bit mode.
       Deferred to a C implementation which only touches rAX and status flags. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after AAA. */

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aaa);
}
1584
1585
1586/**
1587 * @opcode 0x38
1588 * @opflclass arithmetic
1589 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP r/m8, r8 - read-only body: CMP never writes the destination,
       so no locked worker. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8);
}
1595
1596
1597/**
1598 * @opcode 0x39
1599 * @opflclass arithmetic
1600 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP r/m16/32/64, r16/32/64 - read-only body (no destination write). */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
1606
1607
1608/**
1609 * @opcode 0x3a
1610 * @opflclass arithmetic
1611 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP r8, r/m8. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1617
1618
1619/**
1620 * @opcode 0x3b
1621 * @opflclass arithmetic
1622 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP r16/32/64, r/m16/32/64 - note the first trailing flag is 0 here,
       unlike the modifying or/adc/sbb/and/xor rv,rm decoders which pass 1. */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0, 0);
}
1628
1629
1630/**
1631 * @opcode 0x3c
1632 * @opflclass arithmetic
1633 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL, imm8. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1639
1640
1641/**
1642 * @opcode 0x3d
1643 * @opflclass arithmetic
1644 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX, imm16/32 - trailing 0: CMP does not modify rAX. */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1650
1651
1652/**
1653 * @opcode 0x3e
1654 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record it, make DS the effective segment,
       then continue decoding with the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1664
1665
1666/**
1667 * @opcode 0x3f
1668 * @opfltest af
1669 * @opflmodify cf,pf,af,zf,sf,of
1670 * @opflundef pf,zf,sf,of
1671 * @opgroup og_gen_arith_dec
1672 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1673 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1674 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1675 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1676 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1677 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1678 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1679 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1680 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1681 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1682 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1683 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1684 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1685 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1686 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1687 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1688 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1689 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1690 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1691 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1692 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1693 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1694 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1695 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1696 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1697 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1698 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1699 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1700 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1701 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1702 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1703 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1704 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1705 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1706 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1707 */
1708FNIEMOP_DEF(iemOp_aas)
1709{
1710 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1711 IEMOP_HLP_NO_64BIT();
1712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1713 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1714
1715 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aas);
1716}
1717
1718
1719/**
1720 * Common 'inc/dec register' helper.
1721 *
1722 * Not for 64-bit code, only for what became the rex prefixes.
1723 */
#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            /* 16-bit: apply the worker to the low word of the register. */ \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
            IEM_MC_ARG(uint32_t *, pEFlags, 1); \
            IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            /* 32-bit: needs a 386+.  The high-qword clear mirrors the usual \
               32-bit GPR write convention. */ \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
            IEM_MC_ARG(uint32_t *, pEFlags, 1); \
            IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
            IEM_MC_CLEAR_HIGH_GREG_U64(a_iReg); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        /* No 64-bit case: opcodes 0x40..0x4f are REX prefixes in 64-bit mode \
           (see the doc comment above). */ \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
1754
1755/**
1756 * @opcode 0x40
1757 * @opflclass incdec
1758 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Plain REX (no W/R/X/B bits): record it and decode the real opcode. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC AX/EAX. */
    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1776
1777
1778/**
1779 * @opcode 0x41
1780 * @opflclass incdec
1781 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.B: extends the ModRM r/m / opcode-reg field to r8-r15. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC CX/ECX. */
    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1800
1801
1802/**
1803 * @opcode 0x42
1804 * @opflclass incdec
1805 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.X: extends the SIB index field to r8-r15. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC DX/EDX. */
    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1824
1825
1826
1827/**
1828 * @opcode 0x43
1829 * @opflclass incdec
1830 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.XB: both the SIB index and the r/m/base fields are extended. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC BX/EBX. */
    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1850
1851
1852/**
1853 * @opcode 0x44
1854 * @opflclass incdec
1855 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.R: extends the ModRM reg field to r8-r15. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC SP/ESP. */
    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1874
1875
1876/**
1877 * @opcode 0x45
1878 * @opflclass incdec
1879 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RB: extends both the ModRM reg and r/m/base fields. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC BP/EBP. */
    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1899
1900
1901/**
1902 * @opcode 0x46
1903 * @opflclass incdec
1904 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RX: extends the ModRM reg and SIB index fields. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC SI/ESI. */
    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
1924
1925
1926/**
1927 * @opcode 0x47
1928 * @opflclass incdec
1929 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RXB: extends the ModRM reg, SIB index and r/m/base fields. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC DI/EDI. */
    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
1950
1951
/**
 * @opcode 0x48
 * @opflclass incdec
 * @note In 64-bit mode this byte is the REX.W prefix, not 'dec eAX'.
 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu);   /* REX.W changes the effective operand size. */

        /* Decode the instruction that follows the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
}
1974
1975
/**
 * @opcode 0x49
 * @opflclass incdec
 * @note In 64-bit mode this byte is the REX.BW prefix, not 'dec eCX'.
 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB      = 1 << 3;   /* REX.B extends the ModRM r/m / base / opcode reg field. */
        iemRecalEffOpSize(pVCpu);           /* REX.W changes the effective operand size. */

        /* Decode the instruction that follows the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
}
1999
2000
/**
 * @opcode 0x4a
 * @opflclass incdec
 * @note In 64-bit mode this byte is the REX.XW prefix, not 'dec eDX'.
 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex  = 1 << 3;   /* REX.X extends the SIB index field. */
        iemRecalEffOpSize(pVCpu);           /* REX.W changes the effective operand size. */

        /* Decode the instruction that follows the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
}
2024
2025
/**
 * @opcode 0x4b
 * @opflclass incdec
 * @note In 64-bit mode this byte is the REX.BXW prefix, not 'dec eBX'.
 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB      = 1 << 3;   /* REX.B extends the ModRM r/m / base / opcode reg field. */
        pVCpu->iem.s.uRexIndex  = 1 << 3;   /* REX.X extends the SIB index field. */
        iemRecalEffOpSize(pVCpu);           /* REX.W changes the effective operand size. */

        /* Decode the instruction that follows the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
}
2050
2051
/**
 * @opcode 0x4c
 * @opflclass incdec
 * @note In 64-bit mode this byte is the REX.RW prefix, not 'dec eSP'.
 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg    = 1 << 3;   /* REX.R extends the ModRM reg field. */
        iemRecalEffOpSize(pVCpu);           /* REX.W changes the effective operand size. */

        /* Decode the instruction that follows the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
}
2075
2076
/**
 * @opcode 0x4d
 * @opflclass incdec
 * @note In 64-bit mode this byte is the REX.RBW prefix, not 'dec eBP'.
 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg    = 1 << 3;   /* REX.R extends the ModRM reg field. */
        pVCpu->iem.s.uRexB      = 1 << 3;   /* REX.B extends the ModRM r/m / base / opcode reg field. */
        iemRecalEffOpSize(pVCpu);           /* REX.W changes the effective operand size. */

        /* Decode the instruction that follows the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
}
2101
2102
/**
 * @opcode 0x4e
 * @opflclass incdec
 * @note In 64-bit mode this byte is the REX.RXW prefix, not 'dec eSI'.
 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg    = 1 << 3;   /* REX.R extends the ModRM reg field. */
        pVCpu->iem.s.uRexIndex  = 1 << 3;   /* REX.X extends the SIB index field. */
        iemRecalEffOpSize(pVCpu);           /* REX.W changes the effective operand size. */

        /* Decode the instruction that follows the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
}
2127
2128
/**
 * @opcode 0x4f
 * @opflclass incdec
 * @note In 64-bit mode this byte is the REX.RBXW prefix (all REX bits set),
 *       not 'dec eDI'.
 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg    = 1 << 3;   /* REX.R extends the ModRM reg field. */
        pVCpu->iem.s.uRexB      = 1 << 3;   /* REX.B extends the ModRM r/m / base / opcode reg field. */
        pVCpu->iem.s.uRexIndex  = 1 << 3;   /* REX.X extends the SIB index field. */
        iemRecalEffOpSize(pVCpu);           /* REX.W changes the effective operand size. */

        /* Decode the instruction that follows the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
}
2154
2155
/**
 * Common 'push register' helper.
 *
 * Emits the operand-size specific microcode for pushing a general register.
 *
 * @param   iReg    The general register index (X86_GREG_XXX); in 64-bit mode
 *                  the REX.B bit is ORed in below to select r8-r15.
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB;
        /* In 64-bit mode the default operand size is 64-bit and 0x66 selects
           16-bit; there is no 32-bit push. */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2203
2204
/**
 * @opcode 0x50
 * @note 'push rAX' - defers to the common push helper.
 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
2213
2214
/**
 * @opcode 0x51
 * @note 'push rCX' - defers to the common push helper.
 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
2223
2224
/**
 * @opcode 0x52
 * @note 'push rDX' - defers to the common push helper.
 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
2233
2234
/**
 * @opcode 0x53
 * @note 'push rBX' - defers to the common push helper.
 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
2243
2244
/**
 * @opcode 0x54
 * @note 'push rSP'. Has a dedicated 8086 path: the 8086 pushes the value of
 *       SP *after* the decrement (modelled by the SUB below), while 80186 and
 *       later push the pre-decrement value via the common helper.
 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) != IEMTARGETCPU_8086)
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);

    /* 8086 works differently wrt to 'push sp' compared to 80186 and later. */
    IEM_MC_BEGIN(0, 1, IEM_MC_F_ONLY_8086, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Value);
    IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
    IEM_MC_SUB_LOCAL_U16(u16Value, 2);
    IEM_MC_PUSH_U16(u16Value);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
2264
2265
/**
 * @opcode 0x55
 * @note 'push rBP' - defers to the common push helper.
 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
2274
2275
/**
 * @opcode 0x56
 * @note 'push rSI' - defers to the common push helper.
 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
2284
2285
/**
 * @opcode 0x57
 * @note 'push rDI' - defers to the common push helper.
 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
2294
2295
/**
 * Common 'pop register' helper.
 *
 * Emits the operand-size specific microcode for popping into a general
 * register.
 *
 * @param   iReg    The general register index (X86_GREG_XXX); in 64-bit mode
 *                  the REX.B bit is ORed in below to select r8-r15.
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB;
        /* In 64-bit mode the default operand size is 64-bit and 0x66 selects
           16-bit; there is no 32-bit pop. */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U16(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U32(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U64(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2337
2338
/**
 * @opcode 0x58
 * @note 'pop rAX' - defers to the common pop helper.
 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
2347
2348
/**
 * @opcode 0x59
 * @note 'pop rCX' - defers to the common pop helper.
 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
2357
2358
/**
 * @opcode 0x5a
 * @note 'pop rDX' - defers to the common pop helper.
 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
2367
2368
/**
 * @opcode 0x5b
 * @note 'pop rBX' - defers to the common pop helper.
 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
2377
2378
/**
 * @opcode 0x5c
 * @note 'pop rSP' - defers to the common pop helper.
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
}
2387
2388
/**
 * @opcode 0x5d
 * @note 'pop rBP' - defers to the common pop helper.
 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
2397
2398
/**
 * @opcode 0x5e
 * @note 'pop rSI' - defers to the common pop helper.
 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
2407
2408
/**
 * @opcode 0x5f
 * @note 'pop rDI' - defers to the common pop helper.
 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
2417
2418
/**
 * @opcode 0x60
 * @note PUSHA/PUSHAD - deferred to a C implementation; only SP/ESP is
 *       flagged as modified for the liveness analysis.
 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();    /* Not available on 8086/8088. */
    IEMOP_HLP_NO_64BIT();   /* Invalid in 64-bit mode. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_32);
}
2432
2433
/**
 * @opcode 0x61
 * @note POPA/POPAD outside 64-bit mode; in 64-bit mode this byte would be the
 *       MVEX prefix (Knights Corner), which is not supported and raises \#UD.
 *       The deferred C implementation can modify all eight GPRs, hence the
 *       full register mask for the liveness analysis.
 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();    /* Not available on 8086/8088. */
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                          RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                        iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                      RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                    iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
2471
2472
/**
 * @opcode 0x62
 * @opmnemonic bound
 * @op1 Gv_RO
 * @op2 Ma
 * @opmincpu 80186
 * @ophints harmless x86_invalid_64
 * @optest op1=0 op2=0 ->
 * @optest op1=1 op2=0 -> value.xcpt=5
 * @optest o16 / op1=0xffff op2=0x0000fffe ->
 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set.  In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X.  Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();    /* Not available on 8086/8088. */
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /** @todo testcase: check that there are two memory accesses involved.  Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint16_t, u16Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* The index comes from the register; both bounds come from
                   consecutive memory words at the effective address. */
                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint32_t, u32Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* The index comes from the register; both bounds come from
                   consecutive memory dwords at the effective address. */
                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX.  Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    IEMOP_MNEMONIC(evex, "evex");
    /* Consume the remaining two EVEX payload bytes before giving up. */
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2612
2613
/**
 * @opcode 0x63
 * @opflmodify zf
 * @note non-64-bit modes only; in 64-bit mode 0x63 is MOVSXD (see
 *       iemOp_movsxd_Gv_Ev).
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();                /* ARPL requires 80286 or later. */
    IEMOP_HLP_NO_REAL_OR_V86_MODE();    /* Protected mode only. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
        IEM_MC_ARG(uint16_t,        u16Src,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory - read-modify-write mapping of the destination word. */
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst,           0);
        IEM_MC_ARG(uint16_t,   u16Src,            1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,  2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,  bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2666
2667
/**
 * @opcode 0x63
 *
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases.
 * @note Only the REX.W form (sign-extending 32->64 move) is implemented;
 *       the non-REX.W variants assert below.
 */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller already branched. */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register.
             */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory.
             */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
2715
2716
/**
 * @opcode 0x64
 * @opmnemonic segfs
 * @opmincpu 80386
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    /* Record the prefix and make FS the effective segment. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_FS;

    /* Decode the instruction that follows the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2734
2735
/**
 * @opcode 0x65
 * @opmnemonic seggs
 * @opmincpu 80386
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    /* Record the prefix and make GS the effective segment. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_GS;

    /* Decode the instruction that follows the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2753
2754
/**
 * @opcode 0x66
 * @opmnemonic opsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);   /* The prefix toggles the effective operand size. */

    /* For the 4 entry opcode tables, the operand size prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    /* Decode the instruction that follows the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2779
2780
/**
 * @opcode 0x67
 * @opmnemonic addrsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    /* Toggle the effective address size: 16<->32 in legacy modes; 64->32 in
       64-bit mode (16-bit addressing is not reachable there). */
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    /* Decode the instruction that follows the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2806
2807
/**
 * @opcode 0x68
 * @note PUSH Iz - push a sign-extended word/dword/qword immediate.  The
 *       64-bit form takes a sign-extended 32-bit immediate.
 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();    /* Not available on 8086/8088. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint16_t, u16Value, u16Imm);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint32_t, u32Value, u32Imm);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint64_t, u64Value, u64Imm);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2851
2852
/**
 * @opcode 0x69
 * @opflclass multiply
 * @note IMUL Gv,Ev,Iz - three operand form: Gv = Ev * Iz.  The EFLAGS
 *       behavior variant is selected per target CPU via
 *       IEMTARGETCPU_EFL_BEHAVIOR_SELECT.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();    /* Not available on 8086/8088. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_ARG_LOCAL_REF(uint16_t *,        pu16Dst, u16Tmp,     0);
                IEM_MC_ARG_CONST(uint16_t,              u16Src,/*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *,                  pEFlags,             2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = size of the trailing imm16. */

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint16_t *,        pu16Dst, u16Tmp,     0);
                IEM_MC_ARG_CONST(uint16_t,              u16Src,  u16Imm,     1);
                IEM_MC_ARG(uint32_t *,                  pEFlags,             2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint32_t *,        pu32Dst, u32Tmp,     0);
                IEM_MC_ARG_CONST(uint32_t,              u32Src,/*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *,                  pEFlags,             2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = size of the trailing imm32. */

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint32_t *,        pu32Dst, u32Tmp,     0);
                IEM_MC_ARG_CONST(uint32_t,              u32Src,  u32Imm,     1);
                IEM_MC_ARG(uint32_t *,                  pEFlags,             2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint64_t *,        pu64Dst, u64Tmp,     0);
                IEM_MC_ARG_CONST(uint64_t,              u64Src,/*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *,                  pEFlags,             2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = size of the trailing imm32. */

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();          /* parameter count for the threaded function for this block. */

                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint64_t *,        pu64Dst, u64Tmp,                        0);
                IEM_MC_ARG_CONST(uint64_t,              u64Src, /*=*/ (int64_t)(int32_t)u32Imm, 1);
                IEM_MC_ARG(uint32_t *,                  pEFlags,                                2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3012
3013
3014/**
3015 * @opcode 0x6a
3016 */
3017FNIEMOP_DEF(iemOp_push_Ib)
3018{
3019 IEMOP_MNEMONIC(push_Ib, "push Ib");
3020 IEMOP_HLP_MIN_186();
3021 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3022 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3023
3024 switch (pVCpu->iem.s.enmEffOpSize)
3025 {
3026 case IEMMODE_16BIT:
3027 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_186, 0);
3028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3029 IEM_MC_LOCAL_CONST(uint16_t, uValue, (int16_t)i8Imm);
3030 IEM_MC_PUSH_U16(uValue);
3031 IEM_MC_ADVANCE_RIP_AND_FINISH();
3032 IEM_MC_END();
3033 break;
3034 case IEMMODE_32BIT:
3035 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
3036 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3037 IEM_MC_LOCAL_CONST(uint32_t, uValue, (int32_t)i8Imm);
3038 IEM_MC_PUSH_U32(uValue);
3039 IEM_MC_ADVANCE_RIP_AND_FINISH();
3040 IEM_MC_END();
3041 break;
3042 case IEMMODE_64BIT:
3043 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
3044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3045 IEM_MC_LOCAL_CONST(uint64_t, uValue, (int64_t)i8Imm);
3046 IEM_MC_PUSH_U64(uValue);
3047 IEM_MC_ADVANCE_RIP_AND_FINISH();
3048 IEM_MC_END();
3049 break;
3050 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3051 }
3052}
3053
3054
3055/**
3056 * @opcode 0x6b
3057 * @opflclass multiply
3058 */
3059FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
3060{
3061 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
3062 IEMOP_HLP_MIN_186();
3063 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3064 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3065
3066 switch (pVCpu->iem.s.enmEffOpSize)
3067 {
3068 case IEMMODE_16BIT:
3069 {
3070 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
3071 if (IEM_IS_MODRM_REG_MODE(bRm))
3072 {
3073 /* register operand */
3074 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
3075 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3077
3078 IEM_MC_LOCAL(uint16_t, u16Tmp);
3079 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3080
3081 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3082 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
3083 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3084 IEM_MC_REF_EFLAGS(pEFlags);
3085 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3086 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3087
3088 IEM_MC_ADVANCE_RIP_AND_FINISH();
3089 IEM_MC_END();
3090 }
3091 else
3092 {
3093 /* memory operand */
3094 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
3095
3096 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3097 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3098
3099 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
3100 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3101
3102 IEM_MC_LOCAL(uint16_t, u16Tmp);
3103 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3104
3105 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3106 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
3107 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3108 IEM_MC_REF_EFLAGS(pEFlags);
3109 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3110 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3111
3112 IEM_MC_ADVANCE_RIP_AND_FINISH();
3113 IEM_MC_END();
3114 }
3115 break;
3116 }
3117
3118 case IEMMODE_32BIT:
3119 {
3120 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
3121 if (IEM_IS_MODRM_REG_MODE(bRm))
3122 {
3123 /* register operand */
3124 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3125 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
3126 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3127 IEM_MC_LOCAL(uint32_t, u32Tmp);
3128 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3129
3130 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3131 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
3132 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3133 IEM_MC_REF_EFLAGS(pEFlags);
3134 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3135 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3136
3137 IEM_MC_ADVANCE_RIP_AND_FINISH();
3138 IEM_MC_END();
3139 }
3140 else
3141 {
3142 /* memory operand */
3143 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
3144 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3145 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3146
3147 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
3148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3149
3150 IEM_MC_LOCAL(uint32_t, u32Tmp);
3151 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3152
3153 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3154 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
3155 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3156 IEM_MC_REF_EFLAGS(pEFlags);
3157 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3158 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3159
3160 IEM_MC_ADVANCE_RIP_AND_FINISH();
3161 IEM_MC_END();
3162 }
3163 break;
3164 }
3165
3166 case IEMMODE_64BIT:
3167 {
3168 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3169 if (IEM_IS_MODRM_REG_MODE(bRm))
3170 {
3171 /* register operand */
3172 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3173 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
3174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3175 IEM_MC_LOCAL(uint64_t, u64Tmp);
3176 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3177
3178 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3179 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
3180 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3181 IEM_MC_REF_EFLAGS(pEFlags);
3182 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3183 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3184
3185 IEM_MC_ADVANCE_RIP_AND_FINISH();
3186 IEM_MC_END();
3187 }
3188 else
3189 {
3190 /* memory operand */
3191 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3192 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3194
3195 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the threaded parameter count. */
3196 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3197
3198 IEM_MC_LOCAL(uint64_t, u64Tmp);
3199 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3200
3201 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3202 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
3203 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3204 IEM_MC_REF_EFLAGS(pEFlags);
3205 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3206 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3207
3208 IEM_MC_ADVANCE_RIP_AND_FINISH();
3209 IEM_MC_END();
3210 }
3211 break;
3212 }
3213
3214 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3215 }
3216}
3217
3218
3219/**
3220 * @opcode 0x6c
3221 * @opfltest iopl,df
3222 */
3223FNIEMOP_DEF(iemOp_insb_Yb_DX)
3224{
3225 IEMOP_HLP_MIN_186();
3226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3227 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3228 {
3229 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
3230 switch (pVCpu->iem.s.enmEffAddrMode)
3231 {
3232 case IEMMODE_16BIT:
3233 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3234 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3235 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3236 iemCImpl_rep_ins_op8_addr16, false);
3237 case IEMMODE_32BIT:
3238 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3239 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3240 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3241 iemCImpl_rep_ins_op8_addr32, false);
3242 case IEMMODE_64BIT:
3243 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3244 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3245 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3246 iemCImpl_rep_ins_op8_addr64, false);
3247 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3248 }
3249 }
3250 else
3251 {
3252 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
3253 switch (pVCpu->iem.s.enmEffAddrMode)
3254 {
3255 case IEMMODE_16BIT:
3256 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3257 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3258 iemCImpl_ins_op8_addr16, false);
3259 case IEMMODE_32BIT:
3260 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3261 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3262 iemCImpl_ins_op8_addr32, false);
3263 case IEMMODE_64BIT:
3264 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3265 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3266 iemCImpl_ins_op8_addr64, false);
3267 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3268 }
3269 }
3270}
3271
3272
3273/**
3274 * @opcode 0x6d
3275 * @opfltest iopl,df
3276 */
3277FNIEMOP_DEF(iemOp_inswd_Yv_DX)
3278{
3279 IEMOP_HLP_MIN_186();
3280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3281 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3282 {
3283 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
3284 switch (pVCpu->iem.s.enmEffOpSize)
3285 {
3286 case IEMMODE_16BIT:
3287 switch (pVCpu->iem.s.enmEffAddrMode)
3288 {
3289 case IEMMODE_16BIT:
3290 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3291 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3292 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3293 iemCImpl_rep_ins_op16_addr16, false);
3294 case IEMMODE_32BIT:
3295 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3296 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3297 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3298 iemCImpl_rep_ins_op16_addr32, false);
3299 case IEMMODE_64BIT:
3300 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3301 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3302 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3303 iemCImpl_rep_ins_op16_addr64, false);
3304 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3305 }
3306 break;
3307 case IEMMODE_64BIT:
3308 case IEMMODE_32BIT:
3309 switch (pVCpu->iem.s.enmEffAddrMode)
3310 {
3311 case IEMMODE_16BIT:
3312 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3313 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3314 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3315 iemCImpl_rep_ins_op32_addr16, false);
3316 case IEMMODE_32BIT:
3317 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3318 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3319 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3320 iemCImpl_rep_ins_op32_addr32, false);
3321 case IEMMODE_64BIT:
3322 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3323 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3324 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3325 iemCImpl_rep_ins_op32_addr64, false);
3326 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3327 }
3328 break;
3329 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3330 }
3331 }
3332 else
3333 {
3334 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
3335 switch (pVCpu->iem.s.enmEffOpSize)
3336 {
3337 case IEMMODE_16BIT:
3338 switch (pVCpu->iem.s.enmEffAddrMode)
3339 {
3340 case IEMMODE_16BIT:
3341 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3342 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3343 iemCImpl_ins_op16_addr16, false);
3344 case IEMMODE_32BIT:
3345 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3346 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3347 iemCImpl_ins_op16_addr32, false);
3348 case IEMMODE_64BIT:
3349 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3350 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3351 iemCImpl_ins_op16_addr64, false);
3352 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3353 }
3354 break;
3355 case IEMMODE_64BIT:
3356 case IEMMODE_32BIT:
3357 switch (pVCpu->iem.s.enmEffAddrMode)
3358 {
3359 case IEMMODE_16BIT:
3360 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3361 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3362 iemCImpl_ins_op32_addr16, false);
3363 case IEMMODE_32BIT:
3364 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3365 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3366 iemCImpl_ins_op32_addr32, false);
3367 case IEMMODE_64BIT:
3368 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3369 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3370 iemCImpl_ins_op32_addr64, false);
3371 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3372 }
3373 break;
3374 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3375 }
3376 }
3377}
3378
3379
3380/**
3381 * @opcode 0x6e
3382 * @opfltest iopl,df
3383 */
3384FNIEMOP_DEF(iemOp_outsb_Yb_DX)
3385{
3386 IEMOP_HLP_MIN_186();
3387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3388 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3389 {
3390 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
3391 switch (pVCpu->iem.s.enmEffAddrMode)
3392 {
3393 case IEMMODE_16BIT:
3394 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3395 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3396 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3397 iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3398 case IEMMODE_32BIT:
3399 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3400 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3401 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3402 iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3403 case IEMMODE_64BIT:
3404 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3405 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3406 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3407 iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3408 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3409 }
3410 }
3411 else
3412 {
3413 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
3414 switch (pVCpu->iem.s.enmEffAddrMode)
3415 {
3416 case IEMMODE_16BIT:
3417 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3418 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3419 iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3420 case IEMMODE_32BIT:
3421 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3422 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3423 iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3424 case IEMMODE_64BIT:
3425 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3426 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3427 iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3428 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3429 }
3430 }
3431}
3432
3433
3434/**
3435 * @opcode 0x6f
3436 * @opfltest iopl,df
3437 */
3438FNIEMOP_DEF(iemOp_outswd_Yv_DX)
3439{
3440 IEMOP_HLP_MIN_186();
3441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3442 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3443 {
3444 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
3445 switch (pVCpu->iem.s.enmEffOpSize)
3446 {
3447 case IEMMODE_16BIT:
3448 switch (pVCpu->iem.s.enmEffAddrMode)
3449 {
3450 case IEMMODE_16BIT:
3451 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3452 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3453 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3454 iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3455 case IEMMODE_32BIT:
3456 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3457 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3458 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3459 iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3460 case IEMMODE_64BIT:
3461 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3462 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3463 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3464 iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3465 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3466 }
3467 break;
3468 case IEMMODE_64BIT:
3469 case IEMMODE_32BIT:
3470 switch (pVCpu->iem.s.enmEffAddrMode)
3471 {
3472 case IEMMODE_16BIT:
3473 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3474 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3475 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3476 iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3477 case IEMMODE_32BIT:
3478 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3479 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3480 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3481 iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3482 case IEMMODE_64BIT:
3483 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3484 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3485 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3486 iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3487 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3488 }
3489 break;
3490 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3491 }
3492 }
3493 else
3494 {
3495 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
3496 switch (pVCpu->iem.s.enmEffOpSize)
3497 {
3498 case IEMMODE_16BIT:
3499 switch (pVCpu->iem.s.enmEffAddrMode)
3500 {
3501 case IEMMODE_16BIT:
3502 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3503 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3504 iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3505 case IEMMODE_32BIT:
3506 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3507 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3508 iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3509 case IEMMODE_64BIT:
3510 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3511 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3512 iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3513 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3514 }
3515 break;
3516 case IEMMODE_64BIT:
3517 case IEMMODE_32BIT:
3518 switch (pVCpu->iem.s.enmEffAddrMode)
3519 {
3520 case IEMMODE_16BIT:
3521 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3522 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3523 iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3524 case IEMMODE_32BIT:
3525 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3526 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3527 iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3528 case IEMMODE_64BIT:
3529 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3530 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3531 iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3532 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3533 }
3534 break;
3535 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3536 }
3537 }
3538}
3539
3540
3541/**
3542 * @opcode 0x70
3543 * @opfltest of
3544 */
3545FNIEMOP_DEF(iemOp_jo_Jb)
3546{
3547 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
3548 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3549 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3550
3551 IEM_MC_BEGIN(0, 0, 0, 0);
3552 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3553 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3554 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3555 } IEM_MC_ELSE() {
3556 IEM_MC_ADVANCE_RIP_AND_FINISH();
3557 } IEM_MC_ENDIF();
3558 IEM_MC_END();
3559}
3560
3561
3562/**
3563 * @opcode 0x71
3564 * @opfltest of
3565 */
3566FNIEMOP_DEF(iemOp_jno_Jb)
3567{
3568 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
3569 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3570 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3571
3572 IEM_MC_BEGIN(0, 0, 0, 0);
3573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3574 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3575 IEM_MC_ADVANCE_RIP_AND_FINISH();
3576 } IEM_MC_ELSE() {
3577 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3578 } IEM_MC_ENDIF();
3579 IEM_MC_END();
3580}
3581
3582/**
3583 * @opcode 0x72
3584 * @opfltest cf
3585 */
3586FNIEMOP_DEF(iemOp_jc_Jb)
3587{
3588 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
3589 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3590 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3591
3592 IEM_MC_BEGIN(0, 0, 0, 0);
3593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3594 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3595 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3596 } IEM_MC_ELSE() {
3597 IEM_MC_ADVANCE_RIP_AND_FINISH();
3598 } IEM_MC_ENDIF();
3599 IEM_MC_END();
3600}
3601
3602
3603/**
3604 * @opcode 0x73
3605 * @opfltest cf
3606 */
3607FNIEMOP_DEF(iemOp_jnc_Jb)
3608{
3609 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
3610 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3611 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3612
3613 IEM_MC_BEGIN(0, 0, 0, 0);
3614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3615 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3616 IEM_MC_ADVANCE_RIP_AND_FINISH();
3617 } IEM_MC_ELSE() {
3618 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3619 } IEM_MC_ENDIF();
3620 IEM_MC_END();
3621}
3622
3623
3624/**
3625 * @opcode 0x74
3626 * @opfltest zf
3627 */
3628FNIEMOP_DEF(iemOp_je_Jb)
3629{
3630 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
3631 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3632 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3633
3634 IEM_MC_BEGIN(0, 0, 0, 0);
3635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3636 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3637 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3638 } IEM_MC_ELSE() {
3639 IEM_MC_ADVANCE_RIP_AND_FINISH();
3640 } IEM_MC_ENDIF();
3641 IEM_MC_END();
3642}
3643
3644
3645/**
3646 * @opcode 0x75
3647 * @opfltest zf
3648 */
3649FNIEMOP_DEF(iemOp_jne_Jb)
3650{
3651 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
3652 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3653 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3654
3655 IEM_MC_BEGIN(0, 0, 0, 0);
3656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3657 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3658 IEM_MC_ADVANCE_RIP_AND_FINISH();
3659 } IEM_MC_ELSE() {
3660 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3661 } IEM_MC_ENDIF();
3662 IEM_MC_END();
3663}
3664
3665
3666/**
3667 * @opcode 0x76
3668 * @opfltest cf,zf
3669 */
3670FNIEMOP_DEF(iemOp_jbe_Jb)
3671{
3672 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
3673 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3674 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3675
3676 IEM_MC_BEGIN(0, 0, 0, 0);
3677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3678 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3679 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3680 } IEM_MC_ELSE() {
3681 IEM_MC_ADVANCE_RIP_AND_FINISH();
3682 } IEM_MC_ENDIF();
3683 IEM_MC_END();
3684}
3685
3686
3687/**
3688 * @opcode 0x77
3689 * @opfltest cf,zf
3690 */
3691FNIEMOP_DEF(iemOp_jnbe_Jb)
3692{
3693 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
3694 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3695 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3696
3697 IEM_MC_BEGIN(0, 0, 0, 0);
3698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3699 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3700 IEM_MC_ADVANCE_RIP_AND_FINISH();
3701 } IEM_MC_ELSE() {
3702 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3703 } IEM_MC_ENDIF();
3704 IEM_MC_END();
3705}
3706
3707
3708/**
3709 * @opcode 0x78
3710 * @opfltest sf
3711 */
3712FNIEMOP_DEF(iemOp_js_Jb)
3713{
3714 IEMOP_MNEMONIC(js_Jb, "js Jb");
3715 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3716 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3717
3718 IEM_MC_BEGIN(0, 0, 0, 0);
3719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3720 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3721 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3722 } IEM_MC_ELSE() {
3723 IEM_MC_ADVANCE_RIP_AND_FINISH();
3724 } IEM_MC_ENDIF();
3725 IEM_MC_END();
3726}
3727
3728
3729/**
3730 * @opcode 0x79
3731 * @opfltest sf
3732 */
3733FNIEMOP_DEF(iemOp_jns_Jb)
3734{
3735 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
3736 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3737 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3738
3739 IEM_MC_BEGIN(0, 0, 0, 0);
3740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3741 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3742 IEM_MC_ADVANCE_RIP_AND_FINISH();
3743 } IEM_MC_ELSE() {
3744 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3745 } IEM_MC_ENDIF();
3746 IEM_MC_END();
3747}
3748
3749
3750/**
3751 * @opcode 0x7a
3752 * @opfltest pf
3753 */
3754FNIEMOP_DEF(iemOp_jp_Jb)
3755{
3756 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
3757 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3758 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3759
3760 IEM_MC_BEGIN(0, 0, 0, 0);
3761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3762 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3763 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3764 } IEM_MC_ELSE() {
3765 IEM_MC_ADVANCE_RIP_AND_FINISH();
3766 } IEM_MC_ENDIF();
3767 IEM_MC_END();
3768}
3769
3770
3771/**
3772 * @opcode 0x7b
3773 * @opfltest pf
3774 */
3775FNIEMOP_DEF(iemOp_jnp_Jb)
3776{
3777 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
3778 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3779 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3780
3781 IEM_MC_BEGIN(0, 0, 0, 0);
3782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3783 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3784 IEM_MC_ADVANCE_RIP_AND_FINISH();
3785 } IEM_MC_ELSE() {
3786 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3787 } IEM_MC_ENDIF();
3788 IEM_MC_END();
3789}
3790
3791
3792/**
3793 * @opcode 0x7c
3794 * @opfltest sf,of
3795 */
3796FNIEMOP_DEF(iemOp_jl_Jb)
3797{
3798 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
3799 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3800 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3801
3802 IEM_MC_BEGIN(0, 0, 0, 0);
3803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3804 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3805 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3806 } IEM_MC_ELSE() {
3807 IEM_MC_ADVANCE_RIP_AND_FINISH();
3808 } IEM_MC_ENDIF();
3809 IEM_MC_END();
3810}
3811
3812
3813/**
3814 * @opcode 0x7d
3815 * @opfltest sf,of
3816 */
3817FNIEMOP_DEF(iemOp_jnl_Jb)
3818{
3819 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
3820 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3821 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3822
3823 IEM_MC_BEGIN(0, 0, 0, 0);
3824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3825 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3826 IEM_MC_ADVANCE_RIP_AND_FINISH();
3827 } IEM_MC_ELSE() {
3828 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3829 } IEM_MC_ENDIF();
3830 IEM_MC_END();
3831}
3832
3833
3834/**
3835 * @opcode 0x7e
3836 * @opfltest zf,sf,of
3837 */
3838FNIEMOP_DEF(iemOp_jle_Jb)
3839{
3840 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
3841 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3842 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3843
3844 IEM_MC_BEGIN(0, 0, 0, 0);
3845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3846 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3847 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3848 } IEM_MC_ELSE() {
3849 IEM_MC_ADVANCE_RIP_AND_FINISH();
3850 } IEM_MC_ENDIF();
3851 IEM_MC_END();
3852}
3853
3854
3855/**
3856 * @opcode 0x7f
3857 * @opfltest zf,sf,of
3858 */
3859FNIEMOP_DEF(iemOp_jnle_Jb)
3860{
3861 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
3862 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3863 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3864
3865 IEM_MC_BEGIN(0, 0, 0, 0);
3866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3867 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3868 IEM_MC_ADVANCE_RIP_AND_FINISH();
3869 } IEM_MC_ELSE() {
3870 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3871 } IEM_MC_ENDIF();
3872 IEM_MC_END();
3873}
3874
3875
3876/**
3877 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
3878 * iemOp_Grp1_Eb_Ib_80.
3879 */
3880#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
3881 if (IEM_IS_MODRM_REG_MODE(bRm)) \
3882 { \
3883 /* register target */ \
3884 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3885 IEM_MC_BEGIN(3, 0, 0, 0); \
3886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
3887 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3888 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3889 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
3890 \
3891 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3892 IEM_MC_REF_EFLAGS(pEFlags); \
3893 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3894 \
3895 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3896 IEM_MC_END(); \
3897 } \
3898 else \
3899 { \
3900 /* memory target */ \
3901 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
3902 { \
3903 IEM_MC_BEGIN(3, 3, 0, 0); \
3904 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3905 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3906 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3907 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
3908 \
3909 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
3910 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3911 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3912 IEMOP_HLP_DONE_DECODING(); \
3913 \
3914 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
3915 IEM_MC_FETCH_EFLAGS(EFlags); \
3916 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3917 \
3918 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
3919 IEM_MC_COMMIT_EFLAGS(EFlags); \
3920 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3921 IEM_MC_END(); \
3922 } \
3923 else \
3924 { \
3925 (void)0
3926
/**
 * Locked-memory tail for IEMOP_BODY_BINARY_Eb_Ib_RW: emits the atomic
 * memory form and closes the scopes the _RW macro left open.
 *
 * @param   a_fnLockedU8    The locked 8-bit worker (pu8Dst, u8Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
3950
/**
 * Read-only variant of IEMOP_BODY_BINARY_Eb_Ib_RW: the memory operand is
 * mapped read-only (the worker only updates EFLAGS).  Like the _RW variant,
 * this ends inside an open 'else' scope which must be closed with
 * IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK.
 *
 * @param   a_fnNormalU8    The 8-bit worker (pu8Dst, u8Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
3997
/**
 * Tail macro completing IEMOP_BODY_BINARY_Eb_Ib_RO: rejects a LOCK prefix
 * on the memory form by raising the invalid-lock-prefix exception, and
 * closes the braces left open by the RO body macro.
 */
3998#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
3999 IEMOP_HLP_DONE_DECODING(); \
4000 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
4001 } \
4002 } \
4003 (void)0
4004
4005
4006
4007/**
4008 * @opmaps grp1_80,grp1_83
4009 * @opcode /0
4010 * @opflclass arithmetic
 *
 * ADD Eb,Ib.  The RW body handles register and plain-memory targets; the
 * LOCKED body (textually pasted as its continuation) handles the
 * LOCK-prefixed memory form via the atomic helper.
4011 */
4012FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
4013{
4014 IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
4015 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_add_u8);
4016 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
4017}
4018
4019
4020/**
4021 * @opmaps grp1_80,grp1_83
4022 * @opcode /1
4023 * @opflclass logical
 *
 * OR Eb,Ib.  RW body for register/plain-memory targets, LOCKED body
 * (its textual continuation) for the LOCK-prefixed memory form.
4024 */
4025FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
4026{
4027 IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
4028 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_or_u8);
4029 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
4030}
4031
4032
4033/**
4034 * @opmaps grp1_80,grp1_83
4035 * @opcode /2
4036 * @opflclass arithmetic_carry
 *
 * ADC Eb,Ib (add with carry-in).  RW body for register/plain-memory
 * targets, LOCKED body for the LOCK-prefixed memory form.
4037 */
4038FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
4039{
4040 IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
4041 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_adc_u8);
4042 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
4043}
4044
4045
4046/**
4047 * @opmaps grp1_80,grp1_83
4048 * @opcode /3
4049 * @opflclass arithmetic_carry
 *
 * SBB Eb,Ib (subtract with borrow-in).  RW body for register/plain-memory
 * targets, LOCKED body for the LOCK-prefixed memory form.
4050 */
4051FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
4052{
4053 IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
4054 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sbb_u8);
4055 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
4056}
4057
4058
4059/**
4060 * @opmaps grp1_80,grp1_83
4061 * @opcode /4
4062 * @opflclass logical
 *
 * AND Eb,Ib.  RW body for register/plain-memory targets, LOCKED body for
 * the LOCK-prefixed memory form.
4063 */
4064FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
4065{
4066 IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
4067 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_and_u8);
4068 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
4069}
4070
4071
4072/**
4073 * @opmaps grp1_80,grp1_83
4074 * @opcode /5
4075 * @opflclass arithmetic
 *
 * SUB Eb,Ib.  RW body for register/plain-memory targets, LOCKED body for
 * the LOCK-prefixed memory form.
4076 */
4077FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
4078{
4079 IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
4080 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sub_u8);
4081 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
4082}
4083
4084
4085/**
4086 * @opmaps grp1_80,grp1_83
4087 * @opcode /6
4088 * @opflclass logical
 *
 * XOR Eb,Ib.  RW body for register/plain-memory targets, LOCKED body for
 * the LOCK-prefixed memory form.
4089 */
4090FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
4091{
4092 IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
4093 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_xor_u8);
4094 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
4095}
4096
4097
4098/**
4099 * @opmaps grp1_80,grp1_83
4100 * @opcode /7
4101 * @opflclass arithmetic
 *
 * CMP Eb,Ib.  Only reads the destination (EFLAGS-only result), hence the
 * read-only body; the NO_LOCK tail raises \#UD on a LOCK prefix since a
 * non-writing instruction cannot be locked.
4102 */
4103FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
4104{
4105 IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
4106 IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
4107 IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
4108}
4109
4110
4111/**
4112 * @opcode 0x80
 *
 * Immediate group 1 dispatcher, byte form: fetches the ModRM byte and
 * routes to the add/or/adc/sbb/and/sub/xor/cmp Eb,Ib worker selected by
 * the ModRM.reg field (/0 .. /7).
4113 */
4114FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
4115{
4116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4117 switch (IEM_GET_MODRM_REG_8(bRm))
4118 {
4119 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
4120 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib, bRm);
4121 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
4122 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
4123 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
4124 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
4125 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
4126 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
4127 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4128 }
4129}
4130
4131
4132/**
4133 * Body for a group 1 binary operator.
 *
 * Read-write Ev,Iz form (used by the grp1_81 workers): emits the
 * register-target paths for the 16/32/64-bit effective operand sizes and
 * the non-LOCKed memory-target paths (plain RW mapping + commit).
 *
 * Deliberately brace-unbalanced: it ends inside the "LOCK prefix present"
 * else-branch and MUST be followed by IEMOP_BODY_BINARY_Ev_Iz_LOCKED,
 * which supplies the atomic memory paths and closes the braces.
 *
 * Notes grounded in the code below:
 *  - In 64-bit operand size the immediate is fetched with
 *    IEM_OPCODE_GET_NEXT_S32_SX_U64, i.e. imm32 sign-extended to 64 bits,
 *    which is why the effective-address hint is 4 bytes there too.
 *  - The 32-bit register path clears the high half of the destination
 *    GREG (IEM_MC_CLEAR_HIGH_GREG_U64) per 64-bit-mode semantics.
 *
 * @param a_fnNormalU16,a_fnNormalU32,a_fnNormalU64
 *      Assembly helpers called as (puDst, uSrc, pEFlags).
4134 */
4135#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4136 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4137 { \
4138 /* register target */ \
4139 switch (pVCpu->iem.s.enmEffOpSize) \
4140 { \
4141 case IEMMODE_16BIT: \
4142 { \
4143 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4144 IEM_MC_BEGIN(3, 0, 0, 0); \
4145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4146 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4147 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4148 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4149 \
4150 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4151 IEM_MC_REF_EFLAGS(pEFlags); \
4152 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4153 \
4154 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4155 IEM_MC_END(); \
4156 break; \
4157 } \
4158 \
4159 case IEMMODE_32BIT: \
4160 { \
4161 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4162 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4164 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4165 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4166 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4167 \
4168 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4169 IEM_MC_REF_EFLAGS(pEFlags); \
4170 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4171 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4172 \
4173 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4174 IEM_MC_END(); \
4175 break; \
4176 } \
4177 \
4178 case IEMMODE_64BIT: \
4179 { \
4180 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4181 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4183 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4184 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4185 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4186 \
4187 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4188 IEM_MC_REF_EFLAGS(pEFlags); \
4189 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4190 \
4191 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4192 IEM_MC_END(); \
4193 break; \
4194 } \
4195 \
4196 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4197 } \
4198 } \
4199 else \
4200 { \
4201 /* memory target */ \
4202 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4203 { \
4204 switch (pVCpu->iem.s.enmEffOpSize) \
4205 { \
4206 case IEMMODE_16BIT: \
4207 { \
4208 IEM_MC_BEGIN(3, 3, 0, 0); \
4209 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4210 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4211 \
4212 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4213 IEMOP_HLP_DONE_DECODING(); \
4214 \
4215 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4216 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4217 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4218 \
4219 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4220 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4221 IEM_MC_FETCH_EFLAGS(EFlags); \
4222 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4223 \
4224 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4225 IEM_MC_COMMIT_EFLAGS(EFlags); \
4226 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4227 IEM_MC_END(); \
4228 break; \
4229 } \
4230 \
4231 case IEMMODE_32BIT: \
4232 { \
4233 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4234 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4236 \
4237 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4238 IEMOP_HLP_DONE_DECODING(); \
4239 \
4240 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4241 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4242 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4243 \
4244 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4245 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4246 IEM_MC_FETCH_EFLAGS(EFlags); \
4247 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4248 \
4249 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4250 IEM_MC_COMMIT_EFLAGS(EFlags); \
4251 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4252 IEM_MC_END(); \
4253 break; \
4254 } \
4255 \
4256 case IEMMODE_64BIT: \
4257 { \
4258 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4259 \
4260 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4261 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4262 \
4263 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4264 IEMOP_HLP_DONE_DECODING(); \
4265 \
4266 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4267 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4268 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4269 \
4270 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4271 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4272 IEM_MC_FETCH_EFLAGS(EFlags); \
4273 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4274 \
4275 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4276 IEM_MC_COMMIT_EFLAGS(EFlags); \
4277 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4278 IEM_MC_END(); \
4279 break; \
4280 } \
4281 \
4282 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4283 } \
4284 } \
4285 else \
4286 { \
4287 (void)0
4288/* This must be a separate macro due to parsing restrictions in IEMAllInstPython.py. */
/**
 * Tail macro completing IEMOP_BODY_BINARY_Ev_Iz_RW: the LOCK-prefixed
 * memory-destination paths for all three effective operand sizes.
 *
 * Supplies the body of the dangling "else" left open by the RW macro and
 * closes its outer braces.  Uses ATOMIC memory mapping/commit pairs and
 * the locked helper variants.  The 64-bit case again fetches an imm32
 * sign-extended to 64 bits (S32_SX_U64), matching the RW macro.
 *
 * @param a_fnLockedU16,a_fnLockedU32,a_fnLockedU64
 *      Locked (atomic) helpers called as (puDst, uSrc, pEFlags).
 */
4289#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
4290 switch (pVCpu->iem.s.enmEffOpSize) \
4291 { \
4292 case IEMMODE_16BIT: \
4293 { \
4294 IEM_MC_BEGIN(3, 3, 0, 0); \
4295 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4296 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4297 \
4298 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4299 IEMOP_HLP_DONE_DECODING(); \
4300 \
4301 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4302 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4303 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4304 \
4305 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4306 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4307 IEM_MC_FETCH_EFLAGS(EFlags); \
4308 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
4309 \
4310 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4311 IEM_MC_COMMIT_EFLAGS(EFlags); \
4312 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4313 IEM_MC_END(); \
4314 break; \
4315 } \
4316 \
4317 case IEMMODE_32BIT: \
4318 { \
4319 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4320 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4321 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4322 \
4323 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4324 IEMOP_HLP_DONE_DECODING(); \
4325 \
4326 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4327 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4328 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4329 \
4330 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4331 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4332 IEM_MC_FETCH_EFLAGS(EFlags); \
4333 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
4334 \
4335 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4336 IEM_MC_COMMIT_EFLAGS(EFlags); \
4337 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4338 IEM_MC_END(); \
4339 break; \
4340 } \
4341 \
4342 case IEMMODE_64BIT: \
4343 { \
4344 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4345 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4347 \
4348 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4349 IEMOP_HLP_DONE_DECODING(); \
4350 \
4351 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4352 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4353 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4354 \
4355 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4356 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4357 IEM_MC_FETCH_EFLAGS(EFlags); \
4358 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
4359 \
4360 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4361 IEM_MC_COMMIT_EFLAGS(EFlags); \
4362 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4363 IEM_MC_END(); \
4364 break; \
4365 } \
4366 \
4367 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4368 } \
4369 } \
4370 } \
4371 (void)0
4372
4373/* read-only version */
/**
 * Body for a group 1 binary operator with a read-only destination
 * (CMP Ev,Iz): register paths plus memory paths using RO mappings.
 *
 * Unlike the Eb,Ib RO macro, this one is self-contained: the trailing
 * else-branch rejects a LOCK prefix itself and all braces are closed here,
 * so no companion NO_LOCK macro is required.
 *
 * @param a_fnNormalU16,a_fnNormalU32,a_fnNormalU64
 *      Helpers called as (puDst, uSrc, pEFlags); only EFLAGS are
 *      committed, the destination is never written.
 */
4374#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4375 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4376 { \
4377 /* register target */ \
4378 switch (pVCpu->iem.s.enmEffOpSize) \
4379 { \
4380 case IEMMODE_16BIT: \
4381 { \
4382 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4383 IEM_MC_BEGIN(3, 0, 0, 0); \
4384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4385 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4386 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4387 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4388 \
4389 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4390 IEM_MC_REF_EFLAGS(pEFlags); \
4391 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4392 \
4393 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4394 IEM_MC_END(); \
4395 break; \
4396 } \
4397 \
4398 case IEMMODE_32BIT: \
4399 { \
4400 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4401 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4403 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4404 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4405 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4406 \
4407 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4408 IEM_MC_REF_EFLAGS(pEFlags); \
4409 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4410 \
4411 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4412 IEM_MC_END(); \
4413 break; \
4414 } \
4415 \
4416 case IEMMODE_64BIT: \
4417 { \
4418 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4419 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4421 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4422 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4423 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4424 \
4425 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4426 IEM_MC_REF_EFLAGS(pEFlags); \
4427 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4428 \
4429 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4430 IEM_MC_END(); \
4431 break; \
4432 } \
4433 \
4434 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4435 } \
4436 } \
4437 else \
4438 { \
4439 /* memory target */ \
4440 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4441 { \
4442 switch (pVCpu->iem.s.enmEffOpSize) \
4443 { \
4444 case IEMMODE_16BIT: \
4445 { \
4446 IEM_MC_BEGIN(3, 3, 0, 0); \
4447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4448 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4449 \
4450 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4451 IEMOP_HLP_DONE_DECODING(); \
4452 \
4453 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4454 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
4455 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4456 \
4457 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4458 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4459 IEM_MC_FETCH_EFLAGS(EFlags); \
4460 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4461 \
4462 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
4463 IEM_MC_COMMIT_EFLAGS(EFlags); \
4464 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4465 IEM_MC_END(); \
4466 break; \
4467 } \
4468 \
4469 case IEMMODE_32BIT: \
4470 { \
4471 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4472 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4473 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4474 \
4475 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4476 IEMOP_HLP_DONE_DECODING(); \
4477 \
4478 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4479 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
4480 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4481 \
4482 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4483 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4484 IEM_MC_FETCH_EFLAGS(EFlags); \
4485 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4486 \
4487 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
4488 IEM_MC_COMMIT_EFLAGS(EFlags); \
4489 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4490 IEM_MC_END(); \
4491 break; \
4492 } \
4493 \
4494 case IEMMODE_64BIT: \
4495 { \
4496 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4499 \
4500 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4501 IEMOP_HLP_DONE_DECODING(); \
4502 \
4503 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4504 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
4505 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4506 \
4507 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4508 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4509 IEM_MC_FETCH_EFLAGS(EFlags); \
4510 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4511 \
4512 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
4513 IEM_MC_COMMIT_EFLAGS(EFlags); \
4514 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4515 IEM_MC_END(); \
4516 break; \
4517 } \
4518 \
4519 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4520 } \
4521 } \
4522 else \
4523 { \
4524 IEMOP_HLP_DONE_DECODING(); \
4525 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
4526 } \
4527 } \
4528 (void)0
4529
4530
4531/**
4532 * @opmaps grp1_81
4533 * @opcode /0
4534 * @opflclass arithmetic
 *
 * ADD Ev,Iz.  RW body for register/plain-memory targets, LOCKED body
 * (its textual continuation) for the LOCK-prefixed memory form.
4535 */
4536FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
4537{
4538 IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
4539 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
4540 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
4541}
4542
4543
4544/**
4545 * @opmaps grp1_81
4546 * @opcode /1
4547 * @opflclass logical
 *
 * OR Ev,Iz.  RW body for register/plain-memory targets, LOCKED body for
 * the LOCK-prefixed memory form.
4548 */
4549FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
4550{
4551 IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
4552 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
4553 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
4554}
4555
4556
4557/**
4558 * @opmaps grp1_81
4559 * @opcode /2
4560 * @opflclass arithmetic_carry
 *
 * ADC Ev,Iz (add with carry-in).  RW body for register/plain-memory
 * targets, LOCKED body for the LOCK-prefixed memory form.
4561 */
4562FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
4563{
4564 IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
4565 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
4566 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
4567}
4568
4569
4570/**
4571 * @opmaps grp1_81
4572 * @opcode /3
4573 * @opflclass arithmetic_carry
 *
 * SBB Ev,Iz (subtract with borrow-in).  RW body for register/plain-memory
 * targets, LOCKED body for the LOCK-prefixed memory form.
4574 */
4575FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
4576{
4577 IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
4578 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
4579 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
4580}
4581
4582
4583/**
4584 * @opmaps grp1_81
4585 * @opcode /4
4586 * @opflclass logical
 *
 * AND Ev,Iz.  RW body for register/plain-memory targets, LOCKED body for
 * the LOCK-prefixed memory form.
4587 */
4588FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
4589{
4590 IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
4591 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
4592 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
4593}
4594
4595
4596/**
4597 * @opmaps grp1_81
4598 * @opcode /5
4599 * @opflclass arithmetic
 *
 * SUB Ev,Iz.  RW body for register/plain-memory targets, LOCKED body for
 * the LOCK-prefixed memory form.
4600 */
4601FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
4602{
4603 IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
4604 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
4605 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
4606}
4607
4608
4609/**
4610 * @opmaps grp1_81
4611 * @opcode /6
4612 * @opflclass logical
 *
 * XOR Ev,Iz.  RW body for register/plain-memory targets, LOCKED body for
 * the LOCK-prefixed memory form.
4613 */
4614FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
4615{
4616 IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
4617 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
4618 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
4619}
4620
4621
4622/**
4623 * @opmaps grp1_81
4624 * @opcode /7
4625 * @opflclass arithmetic
 *
 * CMP Ev,Iz.  Uses the self-contained read-only body, which also raises
 * \#UD on a LOCK prefix itself — no NO_LOCK tail macro is needed here.
4626 */
4627FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
4628{
4629 IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
4630 IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
4631}
4632
4633
4634/**
4635 * @opcode 0x81
 *
 * Immediate group 1 dispatcher, word/dword/qword form with full-size
 * immediate: fetches the ModRM byte and routes to the Ev,Iz worker
 * selected by the ModRM.reg field (/0 .. /7).
4636 */
4637FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
4638{
4639 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4640 switch (IEM_GET_MODRM_REG_8(bRm))
4641 {
4642 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
4643 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz, bRm);
4644 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
4645 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
4646 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
4647 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
4648 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
4649 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
4650 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4651 }
4652}
4653
4654
4655/**
4656 * @opcode 0x82
4657 * @opmnemonic grp1_82
4658 * @opgroup og_groups
 *
 * Legacy alias of opcode 0x80 (group 1 Eb,Ib); invalid in 64-bit mode,
 * which IEMOP_HLP_NO_64BIT enforces before forwarding to the 0x80
 * dispatcher.
4659 */
4660FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
4661{
4662 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
4663 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
4664}
4665
4666
4667/**
4668 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4669 * iemOp_Grp1_Ev_Ib.
 *
 * Read-write Ev,Ib form: the byte immediate is sign-extended to the
 * effective operand size (the (int8_t) casts below) before being combined
 * with the destination.  Emits the register-target paths and the
 * non-LOCKed memory-target paths (RW mapping + commit).
 *
 * Deliberately brace-unbalanced: ends inside the "LOCK prefix present"
 * else-branch and MUST be followed by IEMOP_BODY_BINARY_Ev_Ib_LOCKED,
 * which supplies the atomic memory paths and closes the braces.
 *
 * @param a_fnNormalU16,a_fnNormalU32,a_fnNormalU64
 *      Assembly helpers called as (puDst, uSrc, pEFlags).
4670 */
4671#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4672 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4673 { \
4674 /* \
4675 * Register target \
4676 */ \
4677 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4678 switch (pVCpu->iem.s.enmEffOpSize) \
4679 { \
4680 case IEMMODE_16BIT: \
4681 IEM_MC_BEGIN(3, 0, 0, 0); \
4682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4683 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4684 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
4685 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4686 \
4687 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4688 IEM_MC_REF_EFLAGS(pEFlags); \
4689 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4690 \
4691 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4692 IEM_MC_END(); \
4693 break; \
4694 \
4695 case IEMMODE_32BIT: \
4696 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4698 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4699 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
4700 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4701 \
4702 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4703 IEM_MC_REF_EFLAGS(pEFlags); \
4704 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4705 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4706 \
4707 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4708 IEM_MC_END(); \
4709 break; \
4710 \
4711 case IEMMODE_64BIT: \
4712 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4714 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4715 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
4716 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4717 \
4718 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4719 IEM_MC_REF_EFLAGS(pEFlags); \
4720 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4721 \
4722 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4723 IEM_MC_END(); \
4724 break; \
4725 \
4726 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4727 } \
4728 } \
4729 else \
4730 { \
4731 /* \
4732 * Memory target. \
4733 */ \
4734 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4735 { \
4736 switch (pVCpu->iem.s.enmEffOpSize) \
4737 { \
4738 case IEMMODE_16BIT: \
4739 IEM_MC_BEGIN(3, 3, 0, 0); \
4740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4742 \
4743 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4744 IEMOP_HLP_DONE_DECODING(); \
4745 \
4746 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4747 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4748 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4749 \
4750 IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
4751 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4752 IEM_MC_FETCH_EFLAGS(EFlags); \
4753 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4754 \
4755 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4756 IEM_MC_COMMIT_EFLAGS(EFlags); \
4757 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4758 IEM_MC_END(); \
4759 break; \
4760 \
4761 case IEMMODE_32BIT: \
4762 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4764 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4765 \
4766 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4767 IEMOP_HLP_DONE_DECODING(); \
4768 \
4769 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4770 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4771 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4772 \
4773 IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
4774 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4775 IEM_MC_FETCH_EFLAGS(EFlags); \
4776 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4777 \
4778 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4779 IEM_MC_COMMIT_EFLAGS(EFlags); \
4780 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4781 IEM_MC_END(); \
4782 break; \
4783 \
4784 case IEMMODE_64BIT: \
4785 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4786 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4788 \
4789 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4790 IEMOP_HLP_DONE_DECODING(); \
4791 \
4792 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4793 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4794 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4795 \
4796 IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
4797 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4798 IEM_MC_FETCH_EFLAGS(EFlags); \
4799 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4800 \
4801 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4802 IEM_MC_COMMIT_EFLAGS(EFlags); \
4803 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4804 IEM_MC_END(); \
4805 break; \
4806 \
4807 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4808 } \
4809 } \
4810 else \
4811 { \
4812 (void)0
4813/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * Tail macro completing IEMOP_BODY_BINARY_Ev_Ib_RW: the LOCK-prefixed
 * memory-destination paths for all three effective operand sizes.
 *
 * Supplies the body of the dangling "else" left open by the RW macro and
 * closes its outer braces.  Uses ATOMIC memory mapping/commit pairs and
 * the locked helper variants; the byte immediate is sign-extended to the
 * operand size just like in the RW macro.
 *
 * @param a_fnLockedU16,a_fnLockedU32,a_fnLockedU64
 *      Locked (atomic) helpers called as (puDst, uSrc, pEFlags).
 */
4814#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
4815 switch (pVCpu->iem.s.enmEffOpSize) \
4816 { \
4817 case IEMMODE_16BIT: \
4818 IEM_MC_BEGIN(3, 3, 0, 0); \
4819 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4820 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4821 \
4822 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4823 IEMOP_HLP_DONE_DECODING(); \
4824 \
4825 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4826 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4827 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4828 \
4829 IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
4830 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4831 IEM_MC_FETCH_EFLAGS(EFlags); \
4832 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
4833 \
4834 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4835 IEM_MC_COMMIT_EFLAGS(EFlags); \
4836 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4837 IEM_MC_END(); \
4838 break; \
4839 \
4840 case IEMMODE_32BIT: \
4841 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4842 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4843 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4844 \
4845 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4846 IEMOP_HLP_DONE_DECODING(); \
4847 \
4848 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4849 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4850 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4851 \
4852 IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
4853 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4854 IEM_MC_FETCH_EFLAGS(EFlags); \
4855 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
4856 \
4857 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4858 IEM_MC_COMMIT_EFLAGS(EFlags); \
4859 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4860 IEM_MC_END(); \
4861 break; \
4862 \
4863 case IEMMODE_64BIT: \
4864 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4865 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4866 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4867 \
4868 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4869 IEMOP_HLP_DONE_DECODING(); \
4870 \
4871 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4872 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4873 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4874 \
4875 IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
4876 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4877 IEM_MC_FETCH_EFLAGS(EFlags); \
4878 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
4879 \
4880 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4881 IEM_MC_COMMIT_EFLAGS(EFlags); \
4882 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4883 IEM_MC_END(); \
4884 break; \
4885 \
4886 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4887 } \
4888 } \
4889 } \
4890 (void)0
4891
/*
 * Read-only variant of the Ev,Ib binary-op decoder body.  The destination
 * operand is only read (used by CMP, which produces nothing but EFLAGS), so
 * the memory operand is mapped read-only and unmapped without a write-back,
 * and a LOCK prefix on the memory form raises an invalid-lock-prefix
 * exception instead of dispatching to a locked worker.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
5038
/**
 * ADD Ev,Ib (0x83 /0): add a sign-extended byte immediate to Ev.
 *
 * @opmaps grp1_83
 * @opcode /0
 * @opflclass arithmetic
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
    /* Register & unlocked-memory forms first, then the LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
5050
5051
/**
 * OR Ev,Ib (0x83 /1): bitwise-or a sign-extended byte immediate into Ev.
 *
 * @opmaps grp1_83
 * @opcode /1
 * @opflclass logical
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
    /* Register & unlocked-memory forms first, then the LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
5063
5064
/**
 * ADC Ev,Ib (0x83 /2): add a sign-extended byte immediate plus CF to Ev.
 *
 * @opmaps grp1_83
 * @opcode /2
 * @opflclass arithmetic_carry
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
    /* Register & unlocked-memory forms first, then the LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
5076
5077
/**
 * SBB Ev,Ib (0x83 /3): subtract a sign-extended byte immediate plus CF from Ev.
 *
 * @opmaps grp1_83
 * @opcode /3
 * @opflclass arithmetic_carry
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
    /* Register & unlocked-memory forms first, then the LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
5089
5090
/**
 * AND Ev,Ib (0x83 /4): bitwise-and a sign-extended byte immediate into Ev.
 *
 * @opmaps grp1_83
 * @opcode /4
 * @opflclass logical
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
    /* Register & unlocked-memory forms first, then the LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
5102
5103
/**
 * SUB Ev,Ib (0x83 /5): subtract a sign-extended byte immediate from Ev.
 *
 * @opmaps grp1_83
 * @opcode /5
 * @opflclass arithmetic
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
    /* Register & unlocked-memory forms first, then the LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
5115
5116
/**
 * XOR Ev,Ib (0x83 /6): bitwise-xor a sign-extended byte immediate into Ev.
 *
 * @opmaps grp1_83
 * @opcode /6
 * @opflclass logical
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
    /* Register & unlocked-memory forms first, then the LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
5128
5129
/**
 * CMP Ev,Ib (0x83 /7): compare Ev with a sign-extended byte immediate.
 *
 * Only EFLAGS are produced, hence the read-only body and no LOCK variant.
 *
 * @opmaps grp1_83
 * @opcode /7
 * @opflclass arithmetic
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
5140
5141
/**
 * Group 1 dispatcher for opcode 0x83 (op Ev,Ib): fetches the ModR/M byte and
 * routes to the handler selected by the ModR/M reg field.
 *
 * @opcode 0x83
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
             to the 386 even if absent in the intel reference manuals and some
             3rd party opcode listings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5164
5165
/**
 * TEST Eb,Gb: AND the operands and set EFLAGS, discarding the result
 * (read-only body, so no LOCK form).
 *
 * @opcode 0x84
 * @opflclass logical
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST. */
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8);
}
5176
5177
/**
 * TEST Ev,Gv: AND the operands and set EFLAGS, discarding the result
 * (read-only body, so no LOCK form).
 *
 * @opcode 0x85
 * @opflclass logical
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST. */
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64);
}
5188
5189
/**
 * XCHG Eb,Gb: swap a byte register with another register or a memory byte.
 *
 * @opcode 0x86
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register-register form: fetch both, then store them crosswise. */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Common body for both workers; a_Style selects the ATOMIC vs plain
         * RW memory mapping/commit variant.
         */
#define IEMOP_XCHG_BYTE(a_fnWorker, a_Style) \
            IEM_MC_BEGIN(2, 4, 0, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            IEM_MC_LOCAL(uint8_t, uTmpReg); \
            IEM_MC_ARG(uint8_t *, pu8Mem, 0); \
            IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
            IEM_MC_MEM_MAP_U8_##a_Style(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker, pu8Mem, pu8Reg); \
            IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Style(bUnmapInfo); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END()

        /* The memory form is handled atomically unless the VM is configured
           to disregard LOCK semantics. */
        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_locked,ATOMIC);
        }
        else
        {
            IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_unlocked,RW);
        }
    }
}
5250
5251
/**
 * XCHG Ev,Gv: swap a 16/32/64-bit general register with another register or a
 * memory operand.
 *
 * @opcode 0x87
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register-register form: fetch both, then store them crosswise. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Common body for both worker sets; a_Type selects the ATOMIC vs
         * plain RW memory mapping/commit variant.
         */
#define IEMOP_XCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
            do { \
                switch (pVCpu->iem.s.enmEffOpSize) \
                { \
                    case IEMMODE_16BIT: \
                        IEM_MC_BEGIN(2, 4, 0, 0); \
                        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_LOCAL(uint16_t, uTmpReg); \
                        IEM_MC_ARG(uint16_t *, pu16Mem, 0); \
                        IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1); \
                        \
                        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                        IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
                        IEM_MC_MEM_MAP_U16_##a_Type(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                        IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker16, pu16Mem, pu16Reg); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                        IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
                        \
                        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                        IEM_MC_END(); \
                        break; \
                        \
                    case IEMMODE_32BIT: \
                        IEM_MC_BEGIN(2, 4, IEM_MC_F_MIN_386, 0); \
                        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_LOCAL(uint32_t, uTmpReg); \
                        IEM_MC_ARG(uint32_t *, pu32Mem, 0); \
                        IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1); \
                        \
                        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                        IEMOP_HLP_DONE_DECODING(); \
                        IEM_MC_MEM_MAP_U32_##a_Type(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                        IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker32, pu32Mem, pu32Reg); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
                        \
                        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                        IEM_MC_END(); \
                        break; \
                        \
                    case IEMMODE_64BIT: \
                        IEM_MC_BEGIN(2, 4, IEM_MC_F_64BIT, 0); \
                        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_LOCAL(uint64_t, uTmpReg); \
                        IEM_MC_ARG(uint64_t *, pu64Mem, 0); \
                        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1); \
                        \
                        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                        IEMOP_HLP_DONE_DECODING(); \
                        IEM_MC_MEM_MAP_U64_##a_Type(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                        IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker64, pu64Mem, pu64Reg); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
                        \
                        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                        IEM_MC_END(); \
                        break; \
                        \
                    IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
                } \
            } while (0)
        /* The memory form is handled atomically unless the VM is configured
           to disregard LOCK semantics. */
        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_locked, iemAImpl_xchg_u32_locked, iemAImpl_xchg_u64_locked,ATOMIC);
        }
        else
        {
            IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_unlocked, iemAImpl_xchg_u32_unlocked, iemAImpl_xchg_u64_unlocked,RW);
        }
    }
}
5397
5398
/**
 * MOV Eb,Gb: store a byte register into another register or memory.
 *
 * @opcode 0x88
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* effective address before decoding is declared done */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5438
5439
/**
 * MOV Ev,Gv: store a 16/32/64-bit general register into another register or
 * memory, per the effective operand size.
 *
 * @opcode 0x89
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5536
5537
/**
 * MOV Gb,Eb: load a byte register from another register or memory.
 *
 * @opcode 0x8a
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5576
5577
/**
 * MOV Gv,Ev: load a 16/32/64-bit general register from another register or
 * memory, per the effective operand size.
 *
 * @opcode 0x8b
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5674
5675
5676/**
5677 * opcode 0x63
5678 * @todo Table fixme
5679 */
5680FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
5681{
5682 if (!IEM_IS_64BIT_CODE(pVCpu))
5683 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
5684 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
5685 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
5686 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
5687}
5688
5689
/**
 * MOV Ev,Sw: store a segment selector into a general register or memory.
 *
 * @opcode 0x8c
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if (iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg); /* zero-extends the selector */
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg); /* zero-extends the selector */
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5768
5769
5770
5771
/**
 * LEA Gv,M: store the effective address of the memory operand in Gv; the
 * register form of the ModR/M byte is invalid.
 *
 * @opcode 0x8d
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /** @todo optimize: This value casting/masking can be skipped if addr-size ==
             *        operand-size, which is usually the case. It'll save an instruction
             *        and a register. */
            IEM_MC_LOCAL(uint16_t, u16Cast); /* truncate the address to the operand size */
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /** @todo optimize: This value casting/masking can be skipped if addr-size ==
             *        operand-size, which is usually the case. It'll save an instruction
             *        and a register. */
            IEM_MC_LOCAL(uint32_t, u32Cast); /* truncate the address to the operand size */
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5827
5828
/**
 * MOV Sw,Ev: load a segment register from a general register or memory word;
 * CS as the destination is invalid.
 *
 * @opcode 0x8e
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    /** @todo r=bird: What does 8086 do here wrt CS? */
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     *
     * Note! Using IEMOP_MOV_SW_EV_REG_BODY here to specify different
     *       IEM_CIMPL_F_XXX values depending on the CPU mode and target
     *       register.  This is a restriction of the current recompiler
     *       approach.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
/* Register source: loads the selector via iemCImpl_load_SReg; the CIMPL call
   lists all four hidden segment-register parts as clobbered. */
#define IEMOP_MOV_SW_EV_REG_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 0, 0, a_fCImplFlags); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t, u16Value, 1); \
            IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                  RT_BIT_64(kIemNativeGstReg_SegSelFirst    + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
                                iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        if (iSegReg == X86_SREG_SS)
        {
            /* Loading SS inhibits interrupts/traps for one instruction. */
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_REG_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_REG_BODY
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#define IEMOP_MOV_SW_EV_MEM_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 1, 0, a_fCImplFlags); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t, u16Value, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                  RT_BIT_64(kIemNativeGstReg_SegSelFirst    + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
                                iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        if (iSegReg == X86_SREG_SS)
        {
            /* Loading SS inhibits interrupts/traps for one instruction. */
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_MEM_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_MEM_BODY
    }
}
5944
5945
/** Opcode 0x8f /0. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignoring it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code with the common
       pop-GReg worker used by opcodes 0x58..0x5f. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calculations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            /* '2 << 8': the high byte of the 3rd argument carries the rSP
               adjustment (the operand size) applied while calculating the
               effective address - compare the iemOpHlpCalcRmEffAddr calls in
               the #else variant below. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR     GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    /* Pop via a temporary RSP copy so nothing is committed on failure. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Only commit the new RSP once both the pop and the store succeeded. */
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
6077
6078
/**
 * @opcode 0x8f
 */
FNIEMOP_DEF(iemOp_Grp1A__xop)
{
    /*
     * AMD has defined /1 thru /7 as XOP prefix.  The prefix is similar to the
     * three byte VEX prefix, except that the mmmmm field cannot have the values
     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    IEMOP_MNEMONIC(xop, "xop");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
    {
        /** @todo Test when exactly the XOP conformance checks kick in during
         *        instruction decoding and fetching (using \#PF). */
        uint8_t bXop2;   IEM_OPCODE_GET_NEXT_U8(&bXop2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* XOP must not be combined with 66/F3/F2/LOCK/REX prefixes. */
        if (   (  pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
            if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* The R/X/B/vvvv fields are stored inverted in the prefix bytes,
               hence the '~' when extracting them: */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bXop2 & 0x3;

            /** @todo XOP: Just use new tables and decoders. */
            /* bRm & 0x1f is the mmmmm opcode map select field (the log text in
               the default case below says 'vvvv', which is misleading). */
            switch (bRm & 0x1f)
            {
                case 8: /* xop opcode map 8. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 9: /* xop opcode map 9. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 10: /* xop opcode map 10. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        else
            Log(("XOP: Invalid prefix mix!\n"));
    }
    else
        Log(("XOP: XOP support disabled!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
6141
6142
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Shared by opcodes 0x90 (with REX.B) thru 0x97: swaps the general register
 * selected by @a iReg (after merging in REX.B) with rAX, sized according to
 * the effective operand size.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    iReg |= pVCpu->iem.s.uRexB; /* REX.B extends the register selection to R8..R15. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6193
6194
6195/**
6196 * @opcode 0x90
6197 */
6198FNIEMOP_DEF(iemOp_nop)
6199{
6200 /* R8/R8D and RAX/EAX can be exchanged. */
6201 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
6202 {
6203 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
6204 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
6205 }
6206
6207 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
6208 {
6209 IEMOP_MNEMONIC(pause, "pause");
6210 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
6211 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
6212 if (!IEM_IS_IN_GUEST(pVCpu))
6213 { /* probable */ }
6214#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6215 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
6216 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmx_pause);
6217#endif
6218#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
6219 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
6220 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_svm_pause);
6221#endif
6222 }
6223 else
6224 IEMOP_MNEMONIC(nop, "nop");
6225 /** @todo testcase: lock nop; lock pause */
6226 IEM_MC_BEGIN(0, 0, 0, 0);
6227 IEMOP_HLP_DONE_DECODING();
6228 IEM_MC_ADVANCE_RIP_AND_FINISH();
6229 IEM_MC_END();
6230}
6231
6232
/**
 * @opcode 0x91
 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    /* Delegates to the common worker; REX.B is merged in there. */
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
6241
6242
/**
 * @opcode 0x92
 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    /* Delegates to the common worker; REX.B is merged in there. */
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
6251
6252
/**
 * @opcode 0x93
 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    /* Delegates to the common worker; REX.B is merged in there. */
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
6261
6262
6263/**
6264 * @opcode 0x94
6265 */
6266FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6267{
6268 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6269 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6270}
6271
6272
/**
 * @opcode 0x95
 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    /* Delegates to the common worker; REX.B is merged in there. */
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
6281
6282
/**
 * @opcode 0x96
 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    /* Delegates to the common worker; REX.B is merged in there. */
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
6291
6292
/**
 * @opcode 0x97
 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    /* Delegates to the common worker; REX.B is merged in there. */
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
6301
6302
/**
 * @opcode 0x98
 *
 * cbw/cwde/cdqe: sign-extend the lower half of rAX into the full (effective
 * operand sized) register.  Implemented by testing the sign bit of the source
 * half and then OR-ing in or AND-ing away the upper half.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) { /* AL sign bit */
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) { /* AX sign bit */
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) { /* EAX sign bit */
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6352
6353
/**
 * @opcode 0x99
 *
 * cwd/cdq/cqo: fill rDX with the sign of rAX - all-ones when the source sign
 * bit is set, zero otherwise.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) { /* AX sign bit */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) { /* EAX sign bit */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) { /* RAX sign bit */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6403
6404
/**
 * @opcode 0x9a
 *
 * Direct far call: the instruction carries the target offset followed by the
 * 16-bit selector as immediates.  Not available in 64-bit code.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg); /* 16-bit offset, zero extended */
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
                                iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
6426
6427
/** Opcode 0x9b. (aka fwait)
 * Checks the FPU wait conditions (CR0.MP/TS and pending FPU exceptions)
 * before advancing RIP; does no other work. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6439
6440
/**
 * @opcode 0x9c
 *
 * Deferred entirely to the C implementation; only rSP is flagged as modified
 * for the recompiler liveness analysis.
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
                                iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
6452
6453
/**
 * @opcode 0x9d
 *
 * Deferred entirely to the C implementation.  Modifies rFLAGS and rSP, and
 * needs IRQ rechecks around it since popping flags may change IF.
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_MNEMONIC(popf_Fv, "popf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
                                iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
6466
6467
/**
 * @opcode 0x9e
 * @opflmodify cf,pf,af,zf,sf
 *
 * Loads SF/ZF/AF/PF/CF in EFLAGS from AH.  In 64-bit mode this requires the
 * LAHF/SAHF CPUID capability, otherwise \#UD.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    /* AH is general register #4 without REX, same encoding as xSP. */
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Keep only the status flags SAHF may set, preserve the upper EFLAGS
       bits, and force the always-one reserved bit 1 (X86_EFL_1). */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6492
6493
/**
 * @opcode 0x9f
 * @opfltest cf,pf,af,zf,sf
 *
 * Stores the low EFLAGS byte into AH.  In 64-bit mode this requires the
 * LAHF/SAHF CPUID capability, otherwise \#UD.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    /* AH is general register #4 without REX, same encoding as xSP. */
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6512
6513
/**
 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
 * Will return/throw on failures.
 *
 * The immediate offset width follows the effective address size (16/32/64
 * bits), zero extended to 64 bits.
 *
 * @param   a_GCPtrMemOff   The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } while (0)
6537
/**
 * @opcode 0xa0
 *
 * mov AL, moffs8: load AL from the absolute offset given in the instruction
 * stream (relative to the effective segment).
 */
FNIEMOP_DEF(iemOp_mov_AL_Ob)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6562
6563
/**
 * @opcode 0xa1
 *
 * mov rAX, moffs: load AX/EAX/RAX from the absolute offset given in the
 * instruction stream (relative to the effective segment).
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6617
6618
/**
 * @opcode 0xa2
 *
 * mov moffs8, AL: store AL at the absolute offset given in the instruction
 * stream (relative to the effective segment).
 */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
    IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6643
6644
/**
 * @opcode 0xa3
 *
 * mov moffs, rAX: store AX/EAX/RAX at the absolute offset given in the
 * instruction stream (relative to the effective segment).
 */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Store rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6698
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits the non-rep movs body: load ValBits from iEffSeg:rSI, store to
 * ES:rDI, then step both index registers by the element size - backwards
 * when EFLAGS.DF is set, forwards otherwise.
 *
 * @param   ValBits     Element width in bits (8/16/32/64).
 * @param   AddrBits    Effective address width in bits (16/32/64).
 * @param   a_fMcFlags  Flags for IEM_MC_BEGIN.
 */
#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \

/**
 * @opcode 0xa4
 * @opfltest df
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     * Note: both F3 (REPZ) and F2 (REPNZ) are treated as plain REP for movs.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6768
6769
/**
 * @opcode 0xa5
 * @opfltest df
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * Note: both F3 (REPZ) and F2 (REPNZ) are treated as plain REP for movs.
     * Every case in the inner switches below returns, so the missing breaks
     * after them are unreachable, not fallthrough bugs.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* 64-bit op size requires 64-bit mode; 16-bit addressing cannot be encoded there. */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6894
6895#undef IEM_MOVS_CASE
6896
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits the non-rep cmps body: load ValBits from iEffSeg:rSI and ES:rDI,
 * compare them via the cmp assembly worker (updating EFLAGS), then step both
 * index registers by the element size - backwards when EFLAGS.DF is set,
 * forwards otherwise.
 *
 * @param   ValBits     Element width in bits (8/16/32/64).
 * @param   AddrBits    Effective address width in bits (16/32/64).
 * @param   a_fMcFlags  Flags for IEM_MC_BEGIN.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 3, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    \
    IEM_MC_LOCAL(RTGCPTR, uAddr1); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr1, X86_GREG_xSI); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr1); \
    \
    IEM_MC_LOCAL(RTGCPTR, uAddr2); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr2, X86_GREG_xDI); \
    IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr2); \
    \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_ARG_LOCAL_REF(uint##ValBits##_t *, puValue1, uValue1, 0); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \

/**
 * @opcode 0xa6
 * @opflclass arithmetic
 * @opfltest df
 *
 * CMPSB - compare the byte at [iEffSeg:xSI] with the byte at ES:[xDI].
 * REPE/REPNE prefixed forms are deferred to the C implementations; the
 * plain form is emitted inline via IEM_CMPS_CASE.
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * Each deferral declares xSI/xDI/xCX as modified so the recompiler
     * flushes/reloads them correctly around the call.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7005
7006
/**
 * @opcode 0xa7
 * @opflclass arithmetic
 * @opfltest df
 *
 * CMPSW/CMPSD/CMPSQ - compare [iEffSeg:xSI] with ES:[xDI] at the effective
 * operand size.  REPE/REPNE forms defer to C implementations selected by
 * the (operand size x address size) pair; the plain form uses IEM_CMPS_CASE.
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case above returns, the default asserts. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case above returns, the default asserts. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7206
7207#undef IEM_CMPS_CASE
7208
/**
 * @opcode 0xa8
 * @opflclass logical
 *
 * TEST AL,Ib - AND AL with the immediate byte, update flags, discard result.
 * The shared IEMOP_BODY_BINARY_AL_Ib macro decodes the immediate and emits
 * the micro-ops; AF is architecturally undefined for TEST.
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
}
7219
7220
/**
 * @opcode 0xa9
 * @opflclass logical
 *
 * TEST rAX,Iz - AND AX/EAX/RAX with the immediate (operand-size sensitive),
 * update flags, discard result.  AF is architecturally undefined for TEST.
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
}
7231
7232
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the micro-op body for one non-repeated STOS variant: stores
 * AL/AX/EAX/RAX to ES:[xDI] and then steps xDI by the operand size,
 * direction per EFLAGS.DF.  No segment override is possible for STOS.
 *
 * @param   ValBits     Operand width in bits (8/16/32/64).
 * @param   AddrBits    Effective address width in bits (16/32/64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags for IEM_MC_BEGIN (CPU/mode gating).
 */
#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    /* Step xDI by the element size, direction per DF. */ \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7249
/**
 * @opcode 0xaa
 *
 * STOSB - store AL to ES:[xDI].  REP-prefixed forms (REPZ and REPNZ behave
 * identically for STOS) are deferred to the C implementations; the plain
 * form is emitted inline via IEM_STOS_CASE.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     * The deferrals declare xDI/xCX as modified for the recompiler.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m16);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m32);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7295
7296
7297/**
7298 * @opcode 0xab
7299 */
7300FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
7301{
7302 /*
7303 * Use the C implementation if a repeat prefix is encountered.
7304 */
7305 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7306 {
7307 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
7308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7309 switch (pVCpu->iem.s.enmEffOpSize)
7310 {
7311 case IEMMODE_16BIT:
7312 switch (pVCpu->iem.s.enmEffAddrMode)
7313 {
7314 case IEMMODE_16BIT:
7315 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7316 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7317 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7318 iemCImpl_stos_ax_m16);
7319 case IEMMODE_32BIT:
7320 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7321 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7322 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7323 iemCImpl_stos_ax_m32);
7324 case IEMMODE_64BIT:
7325 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7326 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7327 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7328 iemCImpl_stos_ax_m64);
7329 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7330 }
7331 break;
7332 case IEMMODE_32BIT:
7333 switch (pVCpu->iem.s.enmEffAddrMode)
7334 {
7335 case IEMMODE_16BIT:
7336 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7337 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7338 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7339 iemCImpl_stos_eax_m16);
7340 case IEMMODE_32BIT:
7341 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7342 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7343 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7344 iemCImpl_stos_eax_m32);
7345 case IEMMODE_64BIT:
7346 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7347 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7348 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7349 iemCImpl_stos_eax_m64);
7350 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7351 }
7352 case IEMMODE_64BIT:
7353 switch (pVCpu->iem.s.enmEffAddrMode)
7354 {
7355 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7356 case IEMMODE_32BIT:
7357 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7358 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7359 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7360 iemCImpl_stos_rax_m32);
7361 case IEMMODE_64BIT:
7362 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7363 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7364 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7365 iemCImpl_stos_rax_m64);
7366 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7367 }
7368 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7369 }
7370 }
7371
7372 /*
7373 * Annoying double switch here.
7374 * Using ugly macro for implementing the cases, sharing it with stosb.
7375 */
7376 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7377 switch (pVCpu->iem.s.enmEffOpSize)
7378 {
7379 case IEMMODE_16BIT:
7380 switch (pVCpu->iem.s.enmEffAddrMode)
7381 {
7382 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7383 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7384 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
7385 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7386 }
7387 break;
7388
7389 case IEMMODE_32BIT:
7390 switch (pVCpu->iem.s.enmEffAddrMode)
7391 {
7392 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7393 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7394 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
7395 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7396 }
7397 break;
7398
7399 case IEMMODE_64BIT:
7400 switch (pVCpu->iem.s.enmEffAddrMode)
7401 {
7402 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7403 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
7404 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
7405 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7406 }
7407 break;
7408 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7409 }
7410}
7411
7412#undef IEM_STOS_CASE
7413
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the micro-op body for one non-repeated LODS variant: loads
 * AL/AX/EAX/RAX from [iEffSeg:xSI] (segment override applies) and then
 * steps xSI by the operand size, direction per EFLAGS.DF.
 *
 * @param   ValBits     Operand width in bits (8/16/32/64).
 * @param   AddrBits    Effective address width in bits (16/32/64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags for IEM_MC_BEGIN (CPU/mode gating).
 */
#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    /* Step xSI by the element size, direction per DF. */ \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7430
/**
 * @opcode 0xac
 * @opfltest df
 *
 * LODSB - load AL from [iEffSeg:xSI].  REP-prefixed forms defer to the C
 * implementations; the plain form is emitted inline via IEM_LODS_CASE.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     * The deferrals declare xAX/xSI/xCX as modified for the recompiler.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7480
7481
/**
 * @opcode 0xad
 * @opfltest df
 *
 * LODSW/LODSD/LODSQ - load AX/EAX/RAX from [iEffSeg:xSI] at the effective
 * operand size.  REP-prefixed forms defer to C implementations selected by
 * the (operand size x address size) pair; the plain form uses IEM_LODS_CASE.
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case above returns, the default asserts. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7605
7606#undef IEM_LODS_CASE
7607
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the micro-op body for one non-repeated SCAS variant: compares
 * AL/AX/EAX/RAX against the memory operand at ES:[xDI] (no segment
 * override possible) via iemAImpl_cmp_uNN, updating only EFLAGS, and then
 * steps xDI by the operand size, direction per EFLAGS.DF.
 *
 * @param   ValBits     Operand width in bits (8/16/32/64).
 * @param   AddrBits    Effective address width in bits (16/32/64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags for IEM_MC_BEGIN (CPU/mode gating).
 */
#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    /* Step xDI by the element size, direction per DF. */ \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
7630
/**
 * @opcode 0xae
 * @opflclass arithmetic
 * @opfltest df
 *
 * SCASB - compare AL with the byte at ES:[xDI].  REPE/REPNE forms defer to
 * the C implementations; the plain form is emitted inline via IEM_SCAS_CASE.
 */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     * The deferrals declare xDI/xCX as modified for the recompiler.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        /* NOTE(review): the stats identifier below looks like a typo for
           'repne_scasb_AL_Xb'; left unchanged since it may be referenced by
           stats/tooling - confirm before renaming. */
        IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7702
7703
7704/**
7705 * @opcode 0xaf
7706 * @opflclass arithmetic
7707 * @opfltest df
7708 */
7709FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
7710{
7711 /*
7712 * Use the C implementation if a repeat prefix is encountered.
7713 */
7714 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7715 {
7716 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
7717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7718 switch (pVCpu->iem.s.enmEffOpSize)
7719 {
7720 case IEMMODE_16BIT:
7721 switch (pVCpu->iem.s.enmEffAddrMode)
7722 {
7723 case IEMMODE_16BIT:
7724 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7725 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7726 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7727 iemCImpl_repe_scas_ax_m16);
7728 case IEMMODE_32BIT:
7729 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7730 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7731 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7732 iemCImpl_repe_scas_ax_m32);
7733 case IEMMODE_64BIT:
7734 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7735 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7736 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7737 iemCImpl_repe_scas_ax_m64);
7738 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7739 }
7740 break;
7741 case IEMMODE_32BIT:
7742 switch (pVCpu->iem.s.enmEffAddrMode)
7743 {
7744 case IEMMODE_16BIT:
7745 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7746 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7747 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7748 iemCImpl_repe_scas_eax_m16);
7749 case IEMMODE_32BIT:
7750 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7751 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7752 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7753 iemCImpl_repe_scas_eax_m32);
7754 case IEMMODE_64BIT:
7755 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7756 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7757 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7758 iemCImpl_repe_scas_eax_m64);
7759 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7760 }
7761 case IEMMODE_64BIT:
7762 switch (pVCpu->iem.s.enmEffAddrMode)
7763 {
7764 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
7765 case IEMMODE_32BIT:
7766 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7767 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7768 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7769 iemCImpl_repe_scas_rax_m32);
7770 case IEMMODE_64BIT:
7771 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7772 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7773 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7774 iemCImpl_repe_scas_rax_m64);
7775 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7776 }
7777 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7778 }
7779 }
7780 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7781 {
7782 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
7783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7784 switch (pVCpu->iem.s.enmEffOpSize)
7785 {
7786 case IEMMODE_16BIT:
7787 switch (pVCpu->iem.s.enmEffAddrMode)
7788 {
7789 case IEMMODE_16BIT:
7790 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7791 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7792 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7793 iemCImpl_repne_scas_ax_m16);
7794 case IEMMODE_32BIT:
7795 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7796 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7797 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7798 iemCImpl_repne_scas_ax_m32);
7799 case IEMMODE_64BIT:
7800 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7801 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7802 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7803 iemCImpl_repne_scas_ax_m64);
7804 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7805 }
7806 break;
7807 case IEMMODE_32BIT:
7808 switch (pVCpu->iem.s.enmEffAddrMode)
7809 {
7810 case IEMMODE_16BIT:
7811 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7812 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7813 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7814 iemCImpl_repne_scas_eax_m16);
7815 case IEMMODE_32BIT:
7816 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7817 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7818 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7819 iemCImpl_repne_scas_eax_m32);
7820 case IEMMODE_64BIT:
7821 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7822 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7823 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7824 iemCImpl_repne_scas_eax_m64);
7825 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7826 }
7827 case IEMMODE_64BIT:
7828 switch (pVCpu->iem.s.enmEffAddrMode)
7829 {
7830 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
7831 case IEMMODE_32BIT:
7832 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7833 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7834 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7835 iemCImpl_repne_scas_rax_m32);
7836 case IEMMODE_64BIT:
7837 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7838 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7839 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7840 iemCImpl_repne_scas_rax_m64);
7841 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7842 }
7843 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7844 }
7845 }
7846
7847 /*
7848 * Annoying double switch here.
7849 * Using ugly macro for implementing the cases, sharing it with scasb.
7850 */
7851 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
7852 switch (pVCpu->iem.s.enmEffOpSize)
7853 {
7854 case IEMMODE_16BIT:
7855 switch (pVCpu->iem.s.enmEffAddrMode)
7856 {
7857 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7858 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7859 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
7860 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7861 }
7862 break;
7863
7864 case IEMMODE_32BIT:
7865 switch (pVCpu->iem.s.enmEffAddrMode)
7866 {
7867 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7868 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7869 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
7870 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7871 }
7872 break;
7873
7874 case IEMMODE_64BIT:
7875 switch (pVCpu->iem.s.enmEffAddrMode)
7876 {
7877 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7878 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
7879 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
7880 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7881 }
7882 break;
7883 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7884 }
7885}
7886
7887#undef IEM_SCAS_CASE
7888
7889/**
7890 * Common 'mov r8, imm8' helper.
7891 */
7892FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
7893{
7894 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7895 IEM_MC_BEGIN(0, 0, 0, 0);
7896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7897 IEM_MC_STORE_GREG_U8_CONST(iFixedReg, u8Imm);
7898 IEM_MC_ADVANCE_RIP_AND_FINISH();
7899 IEM_MC_END();
7900}
7901
7902
7903/**
7904 * @opcode 0xb0
7905 */
7906FNIEMOP_DEF(iemOp_mov_AL_Ib)
7907{
7908 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
7909 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7910}
7911
7912
7913/**
7914 * @opcode 0xb1
7915 */
7916FNIEMOP_DEF(iemOp_CL_Ib)
7917{
7918 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
7919 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7920}
7921
7922
7923/**
7924 * @opcode 0xb2
7925 */
7926FNIEMOP_DEF(iemOp_DL_Ib)
7927{
7928 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
7929 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7930}
7931
7932
7933/**
7934 * @opcode 0xb3
7935 */
7936FNIEMOP_DEF(iemOp_BL_Ib)
7937{
7938 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
7939 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7940}
7941
7942
7943/**
7944 * @opcode 0xb4
7945 */
7946FNIEMOP_DEF(iemOp_mov_AH_Ib)
7947{
7948 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
7949 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7950}
7951
7952
7953/**
7954 * @opcode 0xb5
7955 */
7956FNIEMOP_DEF(iemOp_CH_Ib)
7957{
7958 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
7959 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7960}
7961
7962
7963/**
7964 * @opcode 0xb6
7965 */
7966FNIEMOP_DEF(iemOp_DH_Ib)
7967{
7968 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
7969 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7970}
7971
7972
7973/**
7974 * @opcode 0xb7
7975 */
7976FNIEMOP_DEF(iemOp_BH_Ib)
7977{
7978 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
7979 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7980}
7981
7982
7983/**
7984 * Common 'mov regX,immX' helper.
7985 */
7986FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
7987{
7988 switch (pVCpu->iem.s.enmEffOpSize)
7989 {
7990 case IEMMODE_16BIT:
7991 IEM_MC_BEGIN(0, 0, 0, 0);
7992 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7994 IEM_MC_STORE_GREG_U16_CONST(iFixedReg, u16Imm);
7995 IEM_MC_ADVANCE_RIP_AND_FINISH();
7996 IEM_MC_END();
7997 break;
7998
7999 case IEMMODE_32BIT:
8000 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8001 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8003 IEM_MC_STORE_GREG_U32_CONST(iFixedReg, u32Imm);
8004 IEM_MC_ADVANCE_RIP_AND_FINISH();
8005 IEM_MC_END();
8006 break;
8007
8008 case IEMMODE_64BIT:
8009 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
8010 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
8011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8012 IEM_MC_STORE_GREG_U64_CONST(iFixedReg, u64Imm);
8013 IEM_MC_ADVANCE_RIP_AND_FINISH();
8014 IEM_MC_END();
8015 break;
8016 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8017 }
8018}
8019
8020
8021/**
8022 * @opcode 0xb8
8023 */
8024FNIEMOP_DEF(iemOp_eAX_Iv)
8025{
8026 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
8027 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8028}
8029
8030
8031/**
8032 * @opcode 0xb9
8033 */
8034FNIEMOP_DEF(iemOp_eCX_Iv)
8035{
8036 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
8037 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8038}
8039
8040
8041/**
8042 * @opcode 0xba
8043 */
8044FNIEMOP_DEF(iemOp_eDX_Iv)
8045{
8046 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
8047 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8048}
8049
8050
8051/**
8052 * @opcode 0xbb
8053 */
8054FNIEMOP_DEF(iemOp_eBX_Iv)
8055{
8056 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
8057 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8058}
8059
8060
8061/**
8062 * @opcode 0xbc
8063 */
8064FNIEMOP_DEF(iemOp_eSP_Iv)
8065{
8066 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
8067 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8068}
8069
8070
8071/**
8072 * @opcode 0xbd
8073 */
8074FNIEMOP_DEF(iemOp_eBP_Iv)
8075{
8076 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
8077 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8078}
8079
8080
8081/**
8082 * @opcode 0xbe
8083 */
8084FNIEMOP_DEF(iemOp_eSI_Iv)
8085{
8086 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
8087 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8088}
8089
8090
8091/**
8092 * @opcode 0xbf
8093 */
8094FNIEMOP_DEF(iemOp_eDI_Iv)
8095{
8096 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
8097 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8098}
8099
8100
8101/**
8102 * @opcode 0xc0
8103 */
8104FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
8105{
8106 IEMOP_HLP_MIN_186();
8107 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8108
8109 /* Need to use a body macro here since the EFLAGS behaviour differs between
8110 the shifts, rotates and rotate w/ carry. Sigh. */
8111#define GRP2_BODY_Eb_Ib(a_pImplExpr) \
8112 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
8113 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8114 { \
8115 /* register */ \
8116 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8117 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0); \
8118 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8119 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
8120 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8121 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8122 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8123 IEM_MC_REF_EFLAGS(pEFlags); \
8124 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
8125 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8126 IEM_MC_END(); \
8127 } \
8128 else \
8129 { \
8130 /* memory */ \
8131 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_186, 0); \
8132 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8133 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
8134 \
8135 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8137 \
8138 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8139 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
8140 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8141 \
8142 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8143 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
8144 IEM_MC_FETCH_EFLAGS(EFlags); \
8145 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
8146 \
8147 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8148 IEM_MC_COMMIT_EFLAGS(EFlags); \
8149 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8150 IEM_MC_END(); \
8151 } (void)0
8152
8153 switch (IEM_GET_MODRM_REG_8(bRm))
8154 {
8155 /**
8156 * @opdone
8157 * @opmaps grp2_c0
8158 * @opcode /0
8159 * @opflclass rotate_count
8160 */
8161 case 0:
8162 {
8163 IEMOP_MNEMONIC2(MI, ROL, rol, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8164 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8165 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
8166 break;
8167 }
8168 /**
8169 * @opdone
8170 * @opmaps grp2_c0
8171 * @opcode /1
8172 * @opflclass rotate_count
8173 */
8174 case 1:
8175 {
8176 IEMOP_MNEMONIC2(MI, ROR, ror, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8177 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8178 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
8179 break;
8180 }
8181 /**
8182 * @opdone
8183 * @opmaps grp2_c0
8184 * @opcode /2
8185 * @opflclass rotate_carry_count
8186 */
8187 case 2:
8188 {
8189 IEMOP_MNEMONIC2(MI, RCL, rcl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8190 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8191 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
8192 break;
8193 }
8194 /**
8195 * @opdone
8196 * @opmaps grp2_c0
8197 * @opcode /3
8198 * @opflclass rotate_carry_count
8199 */
8200 case 3:
8201 {
8202 IEMOP_MNEMONIC2(MI, RCR, rcr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8203 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8204 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
8205 break;
8206 }
8207 /**
8208 * @opdone
8209 * @opmaps grp2_c0
8210 * @opcode /4
8211 * @opflclass shift_count
8212 */
8213 case 4:
8214 {
8215 IEMOP_MNEMONIC2(MI, SHL, shl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8216 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8217 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
8218 break;
8219 }
8220 /**
8221 * @opdone
8222 * @opmaps grp2_c0
8223 * @opcode /5
8224 * @opflclass shift_count
8225 */
8226 case 5:
8227 {
8228 IEMOP_MNEMONIC2(MI, SHR, shr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8229 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8230 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
8231 break;
8232 }
8233 /**
8234 * @opdone
8235 * @opmaps grp2_c0
8236 * @opcode /7
8237 * @opflclass shift_count
8238 */
8239 case 7:
8240 {
8241 IEMOP_MNEMONIC2(MI, SAR, sar, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8242 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8243 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
8244 break;
8245 }
8246
8247 /** @opdone */
8248 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8249 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8250 }
8251#undef GRP2_BODY_Eb_Ib
8252}
8253
8254
/* Need to use a body macro here since the EFLAGS behaviour differs between
   the shifts, rotates and rotate w/ carry.  Sigh.
   Shared by all the 0xc1 /r handlers below.  Fetches the Ib shift count and
   dispatches on the effective operand size for both the register and memory
   forms.  Memory form: effective address is calculated before the immediate
   is fetched (operand order), the destination is mapped RW, and both memory
   and EFLAGS are committed after the worker call. */
#define GRP2_BODY_Ev_Ib(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 3, 0, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
8385
8386/**
8387 * @opmaps grp2_c1
8388 * @opcode /0
8389 * @opflclass rotate_count
8390 */
8391FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_Ib, uint8_t, bRm)
8392{
8393 IEMOP_MNEMONIC2(MI, ROL, rol, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8394 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
8395}
8396
8397
8398/**
8399 * @opmaps grp2_c1
8400 * @opcode /1
8401 * @opflclass rotate_count
8402 */
8403FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_Ib, uint8_t, bRm)
8404{
8405 IEMOP_MNEMONIC2(MI, ROR, ror, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8406 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
8407}
8408
8409
8410/**
8411 * @opmaps grp2_c1
8412 * @opcode /2
8413 * @opflclass rotate_carry_count
8414 */
8415FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_Ib, uint8_t, bRm)
8416{
8417 IEMOP_MNEMONIC2(MI, RCL, rcl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8418 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
8419}
8420
8421
8422/**
8423 * @opmaps grp2_c1
8424 * @opcode /3
8425 * @opflclass rotate_carry_count
8426 */
8427FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_Ib, uint8_t, bRm)
8428{
8429 IEMOP_MNEMONIC2(MI, RCR, rcr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8430 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
8431}
8432
8433
8434/**
8435 * @opmaps grp2_c1
8436 * @opcode /4
8437 * @opflclass shift_count
8438 */
8439FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_Ib, uint8_t, bRm)
8440{
8441 IEMOP_MNEMONIC2(MI, SHL, shl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8442 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
8443}
8444
8445
8446/**
8447 * @opmaps grp2_c1
8448 * @opcode /5
8449 * @opflclass shift_count
8450 */
8451FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_Ib, uint8_t, bRm)
8452{
8453 IEMOP_MNEMONIC2(MI, SHR, shr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8454 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
8455}
8456
8457
8458/**
8459 * @opmaps grp2_c1
8460 * @opcode /7
8461 * @opflclass shift_count
8462 */
8463FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_Ib, uint8_t, bRm)
8464{
8465 IEMOP_MNEMONIC2(MI, SAR, sar, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8466 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
8467}
8468
8469#undef GRP2_BODY_Ev_Ib
8470
8471/**
8472 * @opcode 0xc1
8473 */
8474FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
8475{
8476 IEMOP_HLP_MIN_186();
8477 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8478
8479 switch (IEM_GET_MODRM_REG_8(bRm))
8480 {
8481 case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_Ib, bRm);
8482 case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_Ib, bRm);
8483 case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_Ib, bRm);
8484 case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_Ib, bRm);
8485 case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_Ib, bRm);
8486 case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_Ib, bRm);
8487 case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_Ib, bRm);
8488 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8489 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8490 }
8491}
8492
8493
8494/**
8495 * @opcode 0xc2
8496 */
8497FNIEMOP_DEF(iemOp_retn_Iw)
8498{
8499 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
8500 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8501 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8503 switch (pVCpu->iem.s.enmEffOpSize)
8504 {
8505 case IEMMODE_16BIT:
8506 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8507 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_16, u16Imm);
8508 case IEMMODE_32BIT:
8509 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8510 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_32, u16Imm);
8511 case IEMMODE_64BIT:
8512 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8513 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_64, u16Imm);
8514 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8515 }
8516}
8517
8518
8519/**
8520 * @opcode 0xc3
8521 */
8522FNIEMOP_DEF(iemOp_retn)
8523{
8524 IEMOP_MNEMONIC(retn, "retn");
8525 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8527 switch (pVCpu->iem.s.enmEffOpSize)
8528 {
8529 case IEMMODE_16BIT:
8530 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8531 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_16);
8532 case IEMMODE_32BIT:
8533 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8534 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_32);
8535 case IEMMODE_64BIT:
8536 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8537 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_64);
8538 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8539 }
8540}
8541
8542
8543/**
8544 * @opcode 0xc4
8545 */
8546FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
8547{
8548 /* The LDS instruction is invalid 64-bit mode. In legacy and
8549 compatability mode it is invalid with MOD=3.
8550 The use as a VEX prefix is made possible by assigning the inverted
8551 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
8552 outside of 64-bit mode. VEX is not available in real or v86 mode. */
8553 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8554 if ( IEM_IS_64BIT_CODE(pVCpu)
8555 || IEM_IS_MODRM_REG_MODE(bRm) )
8556 {
8557 IEMOP_MNEMONIC(vex3_prefix, "vex3");
8558 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
8559 {
8560 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
8561 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
8562 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
8563 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
8564 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
8565 if ((bVex2 & 0x80 /* VEX.W */) && IEM_IS_64BIT_CODE(pVCpu))
8566 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
8567 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
8568 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
8569 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
8570 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
8571 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
8572 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
8573
8574 switch (bRm & 0x1f)
8575 {
8576 case 1: /* 0x0f lead opcode byte. */
8577#ifdef IEM_WITH_VEX
8578 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8579#else
8580 IEMOP_BITCH_ABOUT_STUB();
8581 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8582#endif
8583
8584 case 2: /* 0x0f 0x38 lead opcode bytes. */
8585#ifdef IEM_WITH_VEX
8586 return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8587#else
8588 IEMOP_BITCH_ABOUT_STUB();
8589 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8590#endif
8591
8592 case 3: /* 0x0f 0x3a lead opcode bytes. */
8593#ifdef IEM_WITH_VEX
8594 return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8595#else
8596 IEMOP_BITCH_ABOUT_STUB();
8597 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8598#endif
8599
8600 default:
8601 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
8602 IEMOP_RAISE_INVALID_OPCODE_RET();
8603 }
8604 }
8605 Log(("VEX3: VEX support disabled!\n"));
8606 IEMOP_RAISE_INVALID_OPCODE_RET();
8607 }
8608
8609 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
8610 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
8611}
8612
8613
8614/**
8615 * @opcode 0xc5
8616 */
8617FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
8618{
8619 /* The LES instruction is invalid 64-bit mode. In legacy and
8620 compatability mode it is invalid with MOD=3.
8621 The use as a VEX prefix is made possible by assigning the inverted
8622 REX.R to the top MOD bit, and the top bit in the inverted register
8623 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
8624 to accessing registers 0..7 in this VEX form. */
8625 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8626 if ( IEM_IS_64BIT_CODE(pVCpu)
8627 || IEM_IS_MODRM_REG_MODE(bRm))
8628 {
8629 IEMOP_MNEMONIC(vex2_prefix, "vex2");
8630 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
8631 {
8632 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
8633 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
8634 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
8635 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
8636 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
8637 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
8638 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
8639 pVCpu->iem.s.idxPrefix = bRm & 0x3;
8640
8641#ifdef IEM_WITH_VEX
8642 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8643#else
8644 IEMOP_BITCH_ABOUT_STUB();
8645 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8646#endif
8647 }
8648
8649 /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
8650 Log(("VEX2: VEX support disabled!\n"));
8651 IEMOP_RAISE_INVALID_OPCODE_RET();
8652 }
8653
8654 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
8655 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
8656}
8657
8658
8659/**
8660 * @opcode 0xc6
8661 */
8662FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
8663{
8664 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8665 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
8666 IEMOP_RAISE_INVALID_OPCODE_RET();
8667 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
8668
8669 if (IEM_IS_MODRM_REG_MODE(bRm))
8670 {
8671 /* register access */
8672 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8673 IEM_MC_BEGIN(0, 0, 0, 0);
8674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8675 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
8676 IEM_MC_ADVANCE_RIP_AND_FINISH();
8677 IEM_MC_END();
8678 }
8679 else
8680 {
8681 /* memory access. */
8682 IEM_MC_BEGIN(0, 1, 0, 0);
8683 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8684 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8685 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8687 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
8688 IEM_MC_ADVANCE_RIP_AND_FINISH();
8689 IEM_MC_END();
8690 }
8691}
8692
8693
8694/**
8695 * @opcode 0xc7
8696 */
8697FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
8698{
8699 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8700 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Iz in this group. */
8701 IEMOP_RAISE_INVALID_OPCODE_RET();
8702 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
8703
8704 if (IEM_IS_MODRM_REG_MODE(bRm))
8705 {
8706 /* register access */
8707 switch (pVCpu->iem.s.enmEffOpSize)
8708 {
8709 case IEMMODE_16BIT:
8710 IEM_MC_BEGIN(0, 0, 0, 0);
8711 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8713 IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
8714 IEM_MC_ADVANCE_RIP_AND_FINISH();
8715 IEM_MC_END();
8716 break;
8717
8718 case IEMMODE_32BIT:
8719 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8720 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8722 IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
8723 IEM_MC_ADVANCE_RIP_AND_FINISH();
8724 IEM_MC_END();
8725 break;
8726
8727 case IEMMODE_64BIT:
8728 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
8729 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8730 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8731 IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
8732 IEM_MC_ADVANCE_RIP_AND_FINISH();
8733 IEM_MC_END();
8734 break;
8735
8736 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8737 }
8738 }
8739 else
8740 {
8741 /* memory access. */
8742 switch (pVCpu->iem.s.enmEffOpSize)
8743 {
8744 case IEMMODE_16BIT:
8745 IEM_MC_BEGIN(0, 1, 0, 0);
8746 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8747 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
8748 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8750 IEM_MC_STORE_MEM_U16_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
8751 IEM_MC_ADVANCE_RIP_AND_FINISH();
8752 IEM_MC_END();
8753 break;
8754
8755 case IEMMODE_32BIT:
8756 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8757 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8758 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8759 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8760 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8761 IEM_MC_STORE_MEM_U32_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
8762 IEM_MC_ADVANCE_RIP_AND_FINISH();
8763 IEM_MC_END();
8764 break;
8765
8766 case IEMMODE_64BIT:
8767 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
8768 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8769 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8770 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8772 IEM_MC_STORE_MEM_U64_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
8773 IEM_MC_ADVANCE_RIP_AND_FINISH();
8774 IEM_MC_END();
8775 break;
8776
8777 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8778 }
8779 }
8780}
8781
8782
8783
8784
/**
 * @opcode 0xc8
 *
 * ENTER Iw,Ib - set up a (possibly nested) stack frame.  This decoder only
 * fetches the two immediates; the real work is deferred to iemCImpl_enter.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();                /* Instruction requires a 80186 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Operand size defaults to 64-bit in long mode. */
    uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);                /* Iw: frame size in bytes. */
    uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);    /* Ib: nesting level. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* 2nd argument: guest registers the C implementation may modify (rSP, rBP)
       - liveness info for the native recompiler. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(0,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
                                iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
8801
8802
/**
 * @opcode 0xc9
 *
 * LEAVE - tear down the stack frame established by ENTER.  Deferred to
 * iemCImpl_leave.
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();                /* Instruction requires a 80186 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Operand size defaults to 64-bit in long mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* 2nd argument: guest registers the C implementation may modify (rSP, rBP)
       - liveness info for the native recompiler. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(0,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
                                iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
8817
8818
/**
 * @opcode 0xca
 *
 * RETF Iw - far return, additionally releasing Iw bytes of stack parameters.
 * Deferred to iemCImpl_retf.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); /* Iw: bytes to pop after the return address. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Indirect far branch via the stack that may change CPU mode.  The
       register mask (native recompiler liveness info) lists rSP plus all
       DS/ES/FS/GS selector/base/limit/attribute values, since the data
       segment registers may be sanitized when returning to an outer ring. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
8848
8849
/**
 * @opcode 0xcb
 *
 * RETF - far return without extra stack parameter release (same as 0xca with
 * a zero immediate).  Deferred to iemCImpl_retf.
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Same flags and register mask as iemOp_retf_Iw; the final '0' is the
       byte count to pop after the return address. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
8878
8879
/**
 * @opcode 0xcc
 *
 * INT3 - single byte software breakpoint, raising \#BP (vector 3) via
 * iemCImpl_int.
 */
FNIEMOP_DEF(iemOp_int3)
{
    IEMOP_MNEMONIC(int3, "int3");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Far indirect branch switching stacks; may VM-exit and changes RFLAGS.
       IEM_CIMPL_F_END_TB terminates the current translation block; the
       register liveness mask is 0 here (cf. UINT64_MAX for 'int Ib'),
       presumably because the TB ends anyway - verify if ever changed. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
}
8891
8892
/**
 * @opcode 0xcd
 *
 * INT Ib - software interrupt with an explicit vector number, dispatched to
 * iemCImpl_int.
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    IEMOP_MNEMONIC(int_Ib, "int Ib");
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int); /* Ib: interrupt vector. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* UINT64_MAX register mask: treat every guest register as potentially
       modified (the handler may task-switch etc.). */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS, UINT64_MAX,
                                iemCImpl_int, u8Int, IEMINT_INTN);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
8906
8907
/**
 * @opcode 0xce
 *
 * INTO - raise \#OF (vector 4) if the overflow flag is set; invalid in
 * 64-bit mode.  Dispatched to iemCImpl_int.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT(); /* Encoding reused in long mode; #UD there. */
    /* Conditional branch variant of the INT dispatch (only traps when OF=1),
       hence the extra IEM_CIMPL_F_BRANCH_CONDITIONAL flag. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_BRANCH_CONDITIONAL | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                UINT64_MAX,
                                iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
8921
8922
/**
 * @opcode 0xcf
 *
 * IRET - interrupt return.  Deferred to iemCImpl_iret; checks for pending
 * IRQs before executing (IEM_CIMPL_F_CHECK_IRQ_BEFORE) and may VM-exit.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Register liveness mask for the native recompiler: rSP plus all data
       segment register state (see the note at the bottom). */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
                                iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
    /* Segment registers are sanitized when returning to an outer ring, or fully
       reloaded when returning to v86 mode. Thus the large flush list above. */
}
8953
8954
/**
 * @opcode 0xd0
 *
 * Group 2: shift/rotate byte operand (Eb) by an implicit count of 1.  The
 * actual operation is selected by the ModR/M reg field; /6 is invalid.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Need to use a body macro here since the EFLAGS behaviour differs between
       the shifts, rotates and rotate w/ carry. Sigh.  a_pImplExpr selects the
       per-operation worker function table (PCIEMOPSHIFTSIZES). */
#define GRP2_BODY_Eb_1(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory */ \
        IEM_MC_BEGIN(3, 3, 0, 0); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1); \
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_FETCH_EFLAGS(EFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        \
        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
        IEM_MC_COMMIT_EFLAGS(EFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } (void)0

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /0
         * @opflclass rotate_1
         */
        case 0:
        {
            IEMOP_MNEMONIC2(M1, ROL, rol, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /1
         * @opflclass rotate_1
         */
        case 1:
        {
            IEMOP_MNEMONIC2(M1, ROR, ror, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /2
         * @opflclass rotate_carry_1
         */
        case 2:
        {
            IEMOP_MNEMONIC2(M1, RCL, rcl, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /3
         * @opflclass rotate_carry_1
         */
        case 3:
        {
            IEMOP_MNEMONIC2(M1, RCR, rcr, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /4
         * @opflclass shift_1
         */
        case 4:
        {
            IEMOP_MNEMONIC2(M1, SHL, shl, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /5
         * @opflclass shift_1
         */
        case 5:
        {
            IEMOP_MNEMONIC2(M1, SHR, shr, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /7
         * @opflclass shift_1
         */
        case 7:
        {
            IEMOP_MNEMONIC2(M1, SAR, sar, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
            break;
        }
        /** @opdone */
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is an unassigned encoding. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
#undef GRP2_BODY_Eb_1
}
9094
9095
/* Need to use a body macro here since the EFLAGS behaviour differs between
   the shifts, rotates and rotate w/ carry. Sigh.

   Emits the register/memory 'Ev,1' body for all three effective operand
   sizes; a_pImplExpr selects the worker table (PCIEMOPSHIFTSIZES).  Expects
   a local 'bRm' in scope; #undef'd again after the 0xd1 handlers below. */
#define GRP2_BODY_Ev_1(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 3, 0, 0); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
9216
/**
 * @opmaps grp2_d1
 * @opcode /0
 * @opflclass rotate_1
 *
 * ROL Ev,1 - rotate left by one.
 */
FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_1, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(M1, ROL, rol, Ev, 1, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
}
9227
9228
/**
 * @opmaps grp2_d1
 * @opcode /1
 * @opflclass rotate_1
 *
 * ROR Ev,1 - rotate right by one.
 */
FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_1, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(M1, ROR, ror, Ev, 1, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
}
9239
9240
/**
 * @opmaps grp2_d1
 * @opcode /2
 * @opflclass rotate_carry_1
 *
 * RCL Ev,1 - rotate left through carry by one.
 */
FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_1, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(M1, RCL, rcl, Ev, 1, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
}
9251
9252
/**
 * @opmaps grp2_d1
 * @opcode /3
 * @opflclass rotate_carry_1
 *
 * RCR Ev,1 - rotate right through carry by one.
 */
FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_1, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(M1, RCR, rcr, Ev, 1, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
}
9263
9264
/**
 * @opmaps grp2_d1
 * @opcode /4
 * @opflclass shift_1
 *
 * SHL Ev,1 - shift left by one.
 */
FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_1, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(M1, SHL, shl, Ev, 1, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
}
9275
9276
/**
 * @opmaps grp2_d1
 * @opcode /5
 * @opflclass shift_1
 *
 * SHR Ev,1 - logical shift right by one.
 */
FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_1, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(M1, SHR, shr, Ev, 1, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
}
9287
9288
/**
 * @opmaps grp2_d1
 * @opcode /7
 * @opflclass shift_1
 *
 * SAR Ev,1 - arithmetic shift right by one.
 */
FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_1, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(M1, SAR, sar, Ev, 1, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
}
9299
9300#undef GRP2_BODY_Ev_1
9301
/**
 * @opcode 0xd1
 *
 * Group 2: shift/rotate Ev by an implicit count of 1.  Dispatches on the
 * ModR/M reg field to the per-operation handlers above; /6 is invalid.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_1, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_1, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_1, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_1, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_1, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_1, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_1, bRm);
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is an unassigned encoding. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
}
9321
9322
/**
 * @opcode 0xd2
 *
 * Group 2: shift/rotate byte operand (Eb) by the count in CL.  The operation
 * is selected by the ModR/M reg field; /6 is invalid.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* NOTE(review): the @opmaps tags on the cases below say grp2_d0 although
       this handles the 0xd2 opcode byte - verify against the map tables. */

    /* Need to use a body macro here since the EFLAGS behaviour differs between
       the shifts, rotates and rotate w/ carry. Sigh.  a_pImplExpr selects the
       per-operation worker function table (PCIEMOPSHIFTSIZES). */
#define GRP2_BODY_Eb_CL(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG(uint8_t, cShiftArg, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory */ \
        IEM_MC_BEGIN(3, 3, 0, 0); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG(uint8_t, cShiftArg, 1); \
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_FETCH_EFLAGS(EFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        \
        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
        IEM_MC_COMMIT_EFLAGS(EFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } (void)0

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /0
         * @opflclass rotate_count
         */
        case 0:
        {
            IEMOP_MNEMONIC2EX(rol_Eb_CL, "rol Eb,CL", M_CL, ROL, rol, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /1
         * @opflclass rotate_count
         */
        case 1:
        {
            IEMOP_MNEMONIC2EX(ror_Eb_CL, "ror Eb,CL", M_CL, ROR, ror, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /2
         * @opflclass rotate_carry_count
         */
        case 2:
        {
            IEMOP_MNEMONIC2EX(rcl_Eb_CL, "rcl Eb,CL", M_CL, RCL, rcl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /3
         * @opflclass rotate_carry_count
         */
        case 3:
        {
            IEMOP_MNEMONIC2EX(rcr_Eb_CL, "rcr Eb,CL", M_CL, RCR, rcr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /4
         * @opflclass shift_count
         */
        case 4:
        {
            IEMOP_MNEMONIC2EX(shl_Eb_CL, "shl Eb,CL", M_CL, SHL, shl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /5
         * @opflclass shift_count
         */
        case 5:
        {
            IEMOP_MNEMONIC2EX(shr_Eb_CL, "shr Eb,CL", M_CL, SHR, shr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /7
         * @opflclass shift_count
         */
        case 7:
        {
            IEMOP_MNEMONIC2EX(sar_Eb_CL, "sar Eb,CL", M_CL, SAR, sar, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
            break;
        }
        /** @opdone */
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is an unassigned encoding. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
#undef GRP2_BODY_Eb_CL
}
9464
9465
/* Need to use a body macro here since the EFLAGS behaviour differs between
   the shifts, rotates and rotate w/ carry. Sigh.

   Emits the register/memory 'Ev,CL' body for all three effective operand
   sizes; the shift count is fetched from CL at runtime and a_pImplExpr
   selects the worker table (PCIEMOPSHIFTSIZES).  Expects a local 'bRm' in
   scope; #undef'd again after the 0xd3 handlers below. */
#define GRP2_BODY_Ev_CL(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 3, 0, 0); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
9592
9593
/**
 * @opmaps grp2_d0
 * @opcode /0
 * @opflclass rotate_count
 *
 * ROL Ev,CL - rotate left by the count in CL.
 */
FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_CL, uint8_t, bRm)
{
    /* NOTE(review): @opmaps above says grp2_d0, yet this serves the 0xd3
       dispatcher below - verify against the instruction map tables. */
    IEMOP_MNEMONIC2EX(rol_Ev_CL, "rol Ev,CL", M_CL, ROL, rol, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
}
9604
9605
/**
 * @opmaps grp2_d0
 * @opcode /1
 * @opflclass rotate_count
 *
 * ROR Ev,CL - rotate right by the count in CL.
 */
FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_CL, uint8_t, bRm)
{
    IEMOP_MNEMONIC2EX(ror_Ev_CL, "ror Ev,CL", M_CL, ROR, ror, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
}
9616
9617
/**
 * @opmaps grp2_d0
 * @opcode /2
 * @opflclass rotate_carry_count
 *
 * RCL Ev,CL - rotate left through carry by the count in CL.
 */
FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_CL, uint8_t, bRm)
{
    IEMOP_MNEMONIC2EX(rcl_Ev_CL, "rcl Ev,CL", M_CL, RCL, rcl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
}
9628
9629
/**
 * @opmaps grp2_d0
 * @opcode /3
 * @opflclass rotate_carry_count
 *
 * RCR Ev,CL - rotate right through carry by the count in CL.
 */
FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_CL, uint8_t, bRm)
{
    IEMOP_MNEMONIC2EX(rcr_Ev_CL, "rcr Ev,CL", M_CL, RCR, rcr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
}
9640
9641
/**
 * @opmaps grp2_d0
 * @opcode /4
 * @opflclass shift_count
 *
 * SHL Ev,CL - shift left by the count in CL.
 */
FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_CL, uint8_t, bRm)
{
    IEMOP_MNEMONIC2EX(shl_Ev_CL, "shl Ev,CL", M_CL, SHL, shl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
}
9652
9653
/**
 * @opmaps grp2_d0
 * @opcode /5
 * @opflclass shift_count
 *
 * SHR Ev,CL - logical shift right by the count in CL.
 */
FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_CL, uint8_t, bRm)
{
    IEMOP_MNEMONIC2EX(shr_Ev_CL, "shr Ev,CL", M_CL, SHR, shr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
}
9664
9665
/**
 * @opmaps grp2_d0
 * @opcode /7
 * @opflclass shift_count
 *
 * SAR Ev,CL - arithmetic shift right by the count in CL.
 */
FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_CL, uint8_t, bRm)
{
    IEMOP_MNEMONIC2EX(sar_Ev_CL, "sar Ev,CL", M_CL, SAR, sar, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
}
9676
9677#undef GRP2_BODY_Ev_CL
9678
/**
 * @opcode 0xd3
 *
 * Group 2: shift/rotate Ev by the count in CL.  Dispatches on the ModR/M reg
 * field to the per-operation handlers above; /6 is invalid.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_CL, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_CL, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_CL, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_CL, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_CL, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_CL, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_CL, bRm);
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is an unassigned encoding. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
}
9698
9699
/**
 * @opcode 0xd4
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef cf,af,of
 *
 * AAM Ib - ASCII adjust AX after multiply, with an explicit (normally 10)
 * divisor immediate.  A zero immediate raises \#DE right here at decode
 * time; otherwise deferred to iemCImpl_aam.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
/** @todo testcase: aam */
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); /* Ib: the divisor. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm) /* Division by zero -> #DE, no need to call the C implementation. */
        IEMOP_RAISE_DIVIDE_ERROR_RET();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aam, bImm);
}
9716
9717
/**
 * @opcode 0xd5
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef cf,af,of
 *
 * AAD Ib - ASCII adjust AX before division, with an explicit (normally 10)
 * base immediate.  Deferred to iemCImpl_aad; only rAX is modified (liveness
 * mask).  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
/** @todo testcase: aad? */
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); /* Ib: the base. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aad, bImm);
}
9732
9733
/**
 * @opcode 0xd6
 *
 * SALC (undocumented): AL = CF ? 0xff : 0x00.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9752
9753
/**
 * @opcode 0xd7
 *
 * XLAT: AL = [iEffSeg : xBX + zero-extended AL], with one microcode variant
 * per effective address size (16/32/64-bit address arithmetic).
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9804
9805
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * Raises \#NM / pending FPU exceptions first; on an empty ST0 or ST(i) it
 * records a stack underflow against ST0 instead of calling the assembly impl.
 *
 * @param   bRm         Mod R/M byte (low 3 bits select ST(i)).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode); /* result -> ST0 */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9835
9836
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags.
 *
 * The assembly impl produces an FSW value which is merged in; no register is
 * written.  Empty ST0/ST(i) records underflow without a destination register
 * (UINT8_MAX).
 *
 * @param   bRm         Mod R/M byte (low 3 bits select ST(i)).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9866
9867
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping when done.
 *
 * Identical to iemOpHlpFpuNoStore_st0_stN except the FSW update / underflow
 * paths also pop the register stack.
 *
 * @param   bRm         Mod R/M byte (low 3 bits select ST(i)).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9897
9898
/** Opcode 0xd8 11/0.  FADD ST0,ST(i) - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
9905
9906
/** Opcode 0xd8 11/1.  FMUL ST0,ST(i) - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
9913
9914
/** Opcode 0xd8 11/2.  FCOM ST0,ST(i) - compare, flags only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
9921
9922
/** Opcode 0xd8 11/3.  FCOMP ST0,ST(i) - compare, flags only, then pop (same assembly worker as FCOM). */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
9929
9930
/** Opcode 0xd8 11/4.  FSUB ST0,ST(i) - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
9937
9938
/** Opcode 0xd8 11/5.  FSUBR ST0,ST(i) - reversed subtract, result in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
9945
9946
/** Opcode 0xd8 11/6.  FDIV ST0,ST(i) - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
9953
9954
/** Opcode 0xd8 11/7.  FDIVR ST0,ST(i) - reversed divide, result in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
9961
9962
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * The 32-bit real is fetched from memory before the FPU-usage check; an
 * empty ST0 records stack underflow instead of calling the assembly impl.
 *
 * @param   bRm         Mod R/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode); /* result -> ST0 */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9998
9999
/** Opcode 0xd8 !11/0.  FADD ST0,m32real - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
10006
10007
/** Opcode 0xd8 !11/1.  FMUL ST0,m32real - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
10014
10015
/** Opcode 0xd8 !11/2.
 * FCOM ST0,m32real - compare only, updates FSW; underflow is recorded with
 * the memory operand info for FPU DP/DS reporting. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10047
10048
/** Opcode 0xd8 !11/3.
 * FCOMP ST0,m32real - like FCOM m32r but pops the stack afterwards (same
 * assembly worker, THEN_POP update variants). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10080
10081
/** Opcode 0xd8 !11/4.  FSUB ST0,m32real - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
10088
10089
/** Opcode 0xd8 !11/5.  FSUBR ST0,m32real - reversed subtract, result in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
10096
10097
/** Opcode 0xd8 !11/6.  FDIV ST0,m32real - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
10104
10105
/** Opcode 0xd8 !11/7.  FDIVR ST0,m32real - reversed divide, result in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
10112
10113
/**
 * @opcode 0xd8
 *
 * First x87 escape byte: dispatches on ModR/M mode (register vs memory) and
 * the reg field.  Also latches the 11-bit FPU opcode (low 3 opcode bits +
 * ModR/M) into uFpuOpcode for later FOP reporting.
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10153
10154
/** Opcode 0xd9 /0 mem32real
 * FLD m32real - converts to 80-bit and pushes onto the stack; if the new top
 * slot (ST7 relative) is occupied, records a stack push overflow instead.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) { /* slot the push will land in */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10185
10186
/** Opcode 0xd9 !11/2 mem32real
 * FST m32real - maps the destination write-only, stores ST0 rounded to 32-bit
 * real.  On empty ST0: writes negative QNaN if IM is masked, otherwise rolls
 * the mapping back; either way records stack underflow with the operand. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10224
10225
/** Opcode 0xd9 !11/3
 * FSTP m32real - identical to FST m32real except the FSW update / underflow
 * paths also pop the register stack. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10263
10264
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte - loads the FPU environment; the 14 vs 28 byte layout is
 * selected by the effective operand size passed to the C implementation. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
10282
10283
10284/** Opcode 0xd9 !11/5 */
10285FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
10286{
10287 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
10288 IEM_MC_BEGIN(1, 1, 0, 0);
10289 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10290 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10291
10292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10293 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10294 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10295
10296 IEM_MC_ARG(uint16_t, u16Fsw, 0);
10297 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10298
10299 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, 0, iemCImpl_fldcw, u16Fsw);
10300 IEM_MC_END();
10301}
10302
10303
/** Opcode 0xd9 !11/6
 * FNSTENV m14/m28byte - stores the FPU environment (no-wait form); layout is
 * selected by the effective operand size passed to the C implementation.
 * NOTE(review): the mnemonic stat id reads "fstenv" (missing the 'n') -
 * cosmetic, confirm before renaming as it may be a stats key. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
10321
10322
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte - stores the FPU control word to memory (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
10339
10340
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP - does nothing but update the FPU opcode/IP tracking. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
10356
10357
/** Opcode 0xd9 11/0 stN
 * FLD ST(i) - duplicates ST(i) onto the top of the stack; empty source
 * records a push underflow. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10382
10383
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i) - exchanges ST0 and ST(i); if either is empty the underflow
 * case is handled by a C implementation. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);      /* old ST(i) -> ST0, C1 set */
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1); /* old ST0 -> ST(i) */
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10412
10413
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST0,ST(i) - copies ST0 into ST(i) and pops.  The iDstReg==0 case is
 * special-cased (frequently used as an 'ffreep st0' substitute): just pop. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode); /* just pop */
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10460
10461
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * Empty ST0 records stack underflow instead of calling the assembly impl.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode); /* result -> ST0 */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10489
10490
/** Opcode 0xd9 0xe0.  FCHS - change sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
10497
10498
/** Opcode 0xd9 0xe1.  FABS - absolute value of ST0. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
10505
10506
/** Opcode 0xd9 0xe4.
 * FTST - compares ST0 against 0.0, updating FSW only; empty ST0 records
 * stack underflow. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10530
10531
/** Opcode 0xd9 0xe5.
 * FXAM - classifies ST0 into the FSW condition bits.  Note: unlike the other
 * ST0 workers it references the register unconditionally (no empty check);
 * the assembly impl handles the empty-register classification itself. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10552
10553
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * If the slot the push would land in (ST7 relative) is occupied, a stack
 * push overflow is recorded instead.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEM_MC_BEGIN(1, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10579
10580
/** Opcode 0xd9 0xe8.  FLD1 - push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
10587
10588
/** Opcode 0xd9 0xe9.  FLDL2T - push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
10595
10596
/** Opcode 0xd9 0xea.  FLDL2E - push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
10603
/** Opcode 0xd9 0xeb.  FLDPI - push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
10610
10611
/** Opcode 0xd9 0xec.  FLDLG2 - push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
10618
/** Opcode 0xd9 0xed.  FLDLN2 - push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
10625
10626
/** Opcode 0xd9 0xee.  FLDZ - push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
10633
10634
/** Opcode 0xd9 0xf0.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition is does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
10648
10649
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * Note the reversed operand order vs iemOpHlpFpu_st0_stN: ST(i) is value1 /
 * destination, ST0 is value2.
 *
 * @param   bRm         Mod R/M byte (low 3 bits select ST(i)).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10680
10681
/** Opcode 0xd9 0xf1.
 * FYL2X: ST(1) = ST(1) * log2(ST(0)), then pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
10688
10689
10690/**
10691 * Common worker for FPU instructions working on ST0 and having two outputs, one
10692 * replacing ST0 and one pushed onto the stack.
10693 *
10694 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10695 */
10696FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
10697{
10698 IEM_MC_BEGIN(2, 1, 0, 0);
10699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10700 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
10701 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
10702 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
10703
10704 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10705 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10706 IEM_MC_PREPARE_FPU_USAGE();
10707 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
10708 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
10709 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
10710 } IEM_MC_ELSE() {
10711 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
10712 } IEM_MC_ENDIF();
10713 IEM_MC_ADVANCE_RIP_AND_FINISH();
10714
10715 IEM_MC_END();
10716}
10717
10718
/** Opcode 0xd9 0xf2.
 * FPTAN: replaces ST(0) with its partial tangent and pushes a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
10725
10726
/** Opcode 0xd9 0xf3.
 * FPATAN: ST(1) = partial arctangent of ST(1)/ST(0), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
10733
10734
/** Opcode 0xd9 0xf4.
 * FXTRACT: splits ST(0) into exponent and significand (replace + push). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
10741
10742
/** Opcode 0xd9 0xf5.
 * FPREM1: IEEE partial remainder, ST(0) = ST(0) rem ST(1) (no pop). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
10749
10750
/** Opcode 0xd9 0xf6.
 * FDECSTP: decrement the FPU stack TOP pointer. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Only the TOP field is adjusted; register contents are left untouched. */
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10771
10772
/** Opcode 0xd9 0xf7.
 * FINCSTP: increment the FPU stack TOP pointer. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Only the TOP field is adjusted; register contents are left untouched. */
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10793
10794
/** Opcode 0xd9 0xf8.
 * FPREM: partial remainder (truncating), ST(0) = ST(0) rem ST(1) (no pop). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
10801
10802
/** Opcode 0xd9 0xf9.
 * FYL2XP1: ST(1) = ST(1) * log2(ST(0) + 1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
10809
10810
/** Opcode 0xd9 0xfa.
 * FSQRT: ST(0) = sqrt(ST(0)). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
10817
10818
/** Opcode 0xd9 0xfb.
 * FSINCOS: replaces ST(0) with sine and pushes the cosine. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
10825
10826
/** Opcode 0xd9 0xfc.
 * FRNDINT: rounds ST(0) to an integer per the current rounding mode. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
10833
10834
/** Opcode 0xd9 0xfd.
 * FSCALE: scales ST(0) by powers of two taken from ST(1) (no pop). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
10841
10842
/** Opcode 0xd9 0xfe.
 * FSIN: ST(0) = sin(ST(0)). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
10849
10850
/** Opcode 0xd9 0xff.
 * FCOS: ST(0) = cos(ST(0)). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
10857
10858
/** Used by iemOp_EscF1.
 * Dispatch table for the register-form 0xd9 opcodes 0xe0..0xff;
 * indexed by (bRm - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
10895
10896
10897/**
10898 * @opcode 0xd9
10899 */
10900FNIEMOP_DEF(iemOp_EscF1)
10901{
10902 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10903 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
10904
10905 if (IEM_IS_MODRM_REG_MODE(bRm))
10906 {
10907 switch (IEM_GET_MODRM_REG_8(bRm))
10908 {
10909 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
10910 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
10911 case 2:
10912 if (bRm == 0xd0)
10913 return FNIEMOP_CALL(iemOp_fnop);
10914 IEMOP_RAISE_INVALID_OPCODE_RET();
10915 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
10916 case 4:
10917 case 5:
10918 case 6:
10919 case 7:
10920 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
10921 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
10922 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10923 }
10924 }
10925 else
10926 {
10927 switch (IEM_GET_MODRM_REG_8(bRm))
10928 {
10929 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
10930 case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
10931 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
10932 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
10933 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
10934 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
10935 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
10936 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
10937 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10938 }
10939 }
10940}
10941
10942
/** Opcode 0xda 11/0.
 * FCMOVB: copy ST(i) to ST(0) when CF is set (below). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; otherwise stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10967
10968
/** Opcode 0xda 11/1.
 * FCMOVE: copy ST(i) to ST(0) when ZF is set (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; otherwise stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10993
10994
/** Opcode 0xda 11/2.
 * FCMOVBE: copy ST(i) to ST(0) when CF or ZF is set (below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; otherwise stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11019
11020
/** Opcode 0xda 11/3.
 * FCMOVU: copy ST(i) to ST(0) when PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; otherwise stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11045
11046
11047/**
11048 * Common worker for FPU instructions working on ST0 and ST1, only affecting
11049 * flags, and popping twice when done.
11050 *
11051 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11052 */
11053FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
11054{
11055 IEM_MC_BEGIN(3, 1, 0, 0);
11056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11057 IEM_MC_LOCAL(uint16_t, u16Fsw);
11058 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11059 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11060 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
11061
11062 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11063 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11064
11065 IEM_MC_PREPARE_FPU_USAGE();
11066 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
11067 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
11068 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
11069 } IEM_MC_ELSE() {
11070 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
11071 } IEM_MC_ENDIF();
11072 IEM_MC_ADVANCE_RIP_AND_FINISH();
11073
11074 IEM_MC_END();
11075}
11076
11077
/** Opcode 0xda 0xe9.
 * FUCOMPP: unordered compare ST(0) with ST(1), then pop both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
11084
11085
11086/**
11087 * Common worker for FPU instructions working on ST0 and an m32i, and storing
11088 * the result in ST0.
11089 *
11090 * @param bRm Mod R/M byte.
11091 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11092 */
11093FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
11094{
11095 IEM_MC_BEGIN(3, 3, 0, 0);
11096 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11097 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11098 IEM_MC_LOCAL(int32_t, i32Val2);
11099 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11100 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11101 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
11102
11103 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11105
11106 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11107 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11108 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11109
11110 IEM_MC_PREPARE_FPU_USAGE();
11111 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11112 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
11113 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
11114 } IEM_MC_ELSE() {
11115 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11116 } IEM_MC_ENDIF();
11117 IEM_MC_ADVANCE_RIP_AND_FINISH();
11118
11119 IEM_MC_END();
11120}
11121
11122
/** Opcode 0xda !11/0.
 * FIADD m32int: ST(0) = ST(0) + m32i. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
11129
11130
/** Opcode 0xda !11/1.
 * FIMUL m32int: ST(0) = ST(0) * m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
11137
11138
/** Opcode 0xda !11/2.
 * FICOM m32int: compare ST(0) with m32i, setting FSW condition codes only. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11170
11171
/** Opcode 0xda !11/3.
 * FICOMP m32int: like FICOM m32int but pops ST(0) afterwards.
 * Shares the assembly worker iemAImpl_ficom_r80_by_i32 with FICOM. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11203
11204
/** Opcode 0xda !11/4.
 * FISUB m32int: ST(0) = ST(0) - m32i. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
11211
11212
/** Opcode 0xda !11/5.
 * FISUBR m32int: ST(0) = m32i - ST(0) (reversed operands). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
11219
11220
/** Opcode 0xda !11/6.
 * FIDIV m32int: ST(0) = ST(0) / m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
11227
11228
/** Opcode 0xda !11/7.
 * FIDIVR m32int: ST(0) = m32i / ST(0) (reversed operands). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
11235
11236
11237/**
11238 * @opcode 0xda
11239 */
11240FNIEMOP_DEF(iemOp_EscF2)
11241{
11242 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11243 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
11244 if (IEM_IS_MODRM_REG_MODE(bRm))
11245 {
11246 switch (IEM_GET_MODRM_REG_8(bRm))
11247 {
11248 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
11249 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
11250 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
11251 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
11252 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
11253 case 5:
11254 if (bRm == 0xe9)
11255 return FNIEMOP_CALL(iemOp_fucompp);
11256 IEMOP_RAISE_INVALID_OPCODE_RET();
11257 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11258 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11259 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11260 }
11261 }
11262 else
11263 {
11264 switch (IEM_GET_MODRM_REG_8(bRm))
11265 {
11266 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
11267 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
11268 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
11269 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
11270 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
11271 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
11272 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
11273 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
11274 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11275 }
11276 }
11277}
11278
11279
/** Opcode 0xdb !11/0.
 * FILD m32int: convert the 32-bit integer to r80 and push it. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) must be empty for the push; otherwise stack (push) overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11310
11311
/** Opcode 0xdb !11/1.
 * FISTTP m32int (SSE3): store ST(0) as a truncated 32-bit integer, then pop.
 * On stack underflow with masked IM, the integer indefinite (INT32_MIN) is
 * stored instead; otherwise the memory write is rolled back. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11349
11350
/** Opcode 0xdb !11/2.
 * FIST m32int: store ST(0) as a rounded 32-bit integer (no pop).
 * On stack underflow with masked IM, the integer indefinite (INT32_MIN) is
 * stored instead; otherwise the memory write is rolled back. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11388
11389
/** Opcode 0xdb !11/3.
 * FISTP m32int: like FIST m32int but pops ST(0) afterwards.
 * Shares the assembly worker iemAImpl_fist_r80_to_i32 with FIST. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11427
11428
/** Opcode 0xdb !11/5.
 * FLD m80real: load the 80-bit real from memory and push it. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) must be empty for the push; otherwise stack (push) overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11459
11460
/** Opcode 0xdb !11/7.
 * FSTP m80real: store ST(0) as an 80-bit real to memory, then pop.
 * On stack underflow with masked IM, a negative QNaN is stored instead;
 * otherwise the memory write is rolled back. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_MEM_MAP_R80_WO(pr80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11498
11499
/** Opcode 0xdb 11/0.
 * FCMOVNB: copy ST(i) to ST(0) when CF is clear (not below). */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; otherwise stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11524
11525
/** Opcode 0xdb 11/1.
 * FCMOVNE: copy ST(i) to ST(0) when ZF is clear (not equal). */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; otherwise stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11550
11551
/** Opcode 0xdb 11/2.
 * FCMOVNBE: copy ST(i) to ST(0) when both CF and ZF are clear (not below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; otherwise stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11576
11577
/** Opcode 0xdb 11/3.
 * FCMOVNU st0,stN - conditionally copies ST(i) into ST(0) when PF is clear
 * ("not unordered"). */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be occupied; otherwise flag stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11602
11603
/** Opcode 0xdb 0xe0.
 * FNENI - 8087-only "enable interrupts"; treated as a no-op on later CPUs.
 * Only the device-not-available check is performed before advancing RIP. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11614
11615
/** Opcode 0xdb 0xe1.
 * FNDISI - 8087-only "disable interrupts"; treated as a no-op on later CPUs.
 * Only the device-not-available check is performed before advancing RIP. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11626
11627
/** Opcode 0xdb 0xe2.
 * FNCLEX - clears the FPU exception bits in FSW without first checking for
 * pending exceptions (no IEM_MC_MAYBE_RAISE_FPU_XCPT here, by design). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    /* FSW is modified, so the FPU state must be actualized for changes. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11640
11641
/** Opcode 0xdb 0xe3.
 * FNINIT - reinitializes the FPU; deferred to the C implementation with
 * fCheckXcpts=false (the "no-wait" variant skips pending-exception checks). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, 0, iemCImpl_finit, false /*fCheckXcpts*/);
}
11649
11650
/** Opcode 0xdb 0xe4.
 * FNSETPM - 80287-only "set protected mode"; ignored (no-op) here. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11661
11662
/** Opcode 0xdb 0xe5.
 * FRSTPM - 80287XL-only "reset protected mode". The no-op emulation is
 * compiled out; current behavior raises \#UD as on newer CPUs. */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
11678
11679
/** Opcode 0xdb 11/5.
 * FUCOMI st0,stN - unordered compare (fUCmp=true) of ST(0) with ST(i),
 * reporting the result in EFLAGS. The third CIMPL argument packs the pop
 * flag (none here) together with the 16-bit FPU opcode. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11688
11689
11690/** Opcode 0xdb 11/6. */
11691FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
11692{
11693 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
11694 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
11695 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
11696 false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
11697}
11698
11699
/**
 * @opcode 0xdb
 *
 * Escape opcode 0xdb decoder: dispatches on the ModR/M byte.  Register-form
 * encodings cover FCMOVcc, the 0xe0-0xe7 control opcodes and FU/FCOMI;
 * memory-form encodings cover the m32i integer and m80r real operations.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of 0xdb + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* Reg group 4 is decoded on the full ModR/M byte (0xe0..0xe7). */
                switch (bRm)
                {
                    case 0xe0:  return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1:  return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2:  return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3:  return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4:  return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5:  return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6:  IEMOP_RAISE_INVALID_OPCODE_RET();
                    case 0xe7:  IEMOP_RAISE_INVALID_OPCODE_RET();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN,  bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11751
11752
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * Both registers must be non-empty; otherwise a stack underflow is recorded
 * against ST(i) instead of calling the assembly helper.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Note: operand order is ST(i) op ST(0); the result lands in ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11783
11784
/** Opcode 0xdc 11/0.
 * FADD stN,st0 - dispatches to the common ST(i),ST(0) worker with the
 * fadd assembly helper; the sum is stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
11791
11792
/** Opcode 0xdc 11/1.
 * FMUL stN,st0 - dispatches to the common ST(i),ST(0) worker with the
 * fmul assembly helper; the product is stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
11799
11800
/** Opcode 0xdc 11/4.
 * FSUBR stN,st0 - dispatches to the common ST(i),ST(0) worker with the
 * reversed-subtract assembly helper; the result is stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
11807
11808
/** Opcode 0xdc 11/5.
 * FSUB stN,st0 - dispatches to the common ST(i),ST(0) worker with the
 * fsub assembly helper; the difference is stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
11815
11816
/** Opcode 0xdc 11/6.
 * FDIVR stN,st0 - dispatches to the common ST(i),ST(0) worker with the
 * reversed-divide assembly helper; the quotient is stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
11823
11824
/** Opcode 0xdc 11/7.
 * FDIV stN,st0 - dispatches to the common ST(i),ST(0) worker with the
 * fdiv assembly helper; the quotient is stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
11831
11832
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * The effective address is calculated before the decoding-done marker, then
 * the m64 value is fetched into a local before the FPU usage is prepared.
 * If ST(0) is empty, a stack underflow with the memory operand info is
 * recorded instead of calling the assembly helper.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Factor1,    1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U,   pr64Factor2,    r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11867
11868
/** Opcode 0xdc !11/0.
 * FADD m64r - dispatches to the common ST(0)/m64 worker with the fadd-by-r64
 * assembly helper; the sum is stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
11875
11876
/** Opcode 0xdc !11/1.
 * FMUL m64r - dispatches to the common ST(0)/m64 worker with the fmul-by-r64
 * assembly helper; the product is stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
11883
11884
/** Opcode 0xdc !11/2.
 * FCOM st0,m64r - compares ST(0) against a 64-bit real memory operand; the
 * comparison result is delivered via FSW condition-code bits, no register
 * is written. Underflow is reported without a destination (UINT8_MAX). */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11916
11917
/** Opcode 0xdc !11/3.
 * FCOMP st0,m64r - same as FCOM m64r but pops the register stack afterwards
 * (note the *_THEN_POP variants of the FSW-update/underflow macros). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11949
11950
/** Opcode 0xdc !11/4.
 * FSUB m64r - dispatches to the common ST(0)/m64 worker with the fsub-by-r64
 * assembly helper; the difference is stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
11957
11958
/** Opcode 0xdc !11/5.
 * FSUBR m64r - dispatches to the common ST(0)/m64 worker with the reversed
 * fsubr-by-r64 assembly helper; the result is stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
11965
11966
/** Opcode 0xdc !11/6.
 * FDIV m64r - dispatches to the common ST(0)/m64 worker with the fdiv-by-r64
 * assembly helper; the quotient is stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
11973
11974
/** Opcode 0xdc !11/7.
 * FDIVR m64r - dispatches to the common ST(0)/m64 worker with the reversed
 * fdivr-by-r64 assembly helper; the quotient is stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
11981
11982
/**
 * @opcode 0xdc
 *
 * Escape opcode 0xdc decoder: register-form encodings are the ST(i),ST(0)
 * arithmetic operations; memory-form encodings operate on m64 reals.
 */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of 0xdc + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12021
12022
/** Opcode 0xdd !11/0.
 * FLD m64r - converts a 64-bit real memory operand to 80-bit and pushes it
 * onto the FPU stack. ST(7) must be empty (the slot that becomes the new
 * top), else a stack push overflow is recorded.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12053
12054
/** Opcode 0xdd !11/1.  (Comment previously said !11/0; the 0xdd decoder
 * dispatches this from reg field 1.)
 * FISTTP m64i - stores ST(0) to a 64-bit integer with truncation, then pops.
 * If ST(0) is empty and FCW.IM is set, the integer-indefinite value is
 * stored instead; with IM clear the mapping is rolled back unwritten. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* The destination is mapped for write before the register check so both
       branches can commit or roll back the same mapping. */
    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(int64_t *,               pi64Dst,    1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,  2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12092
12093
/** Opcode 0xdd !11/2.  (Comment previously said !11/0; the 0xdd decoder
 * dispatches this from reg field 2.)
 * FST m64r - stores ST(0) to a 64-bit real memory operand without popping.
 * If ST(0) is empty and FCW.IM is set, the negative QNaN (real indefinite)
 * is stored; with IM clear the mapping is rolled back unwritten. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,    1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,  2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12131
12132
12133
12134
/** Opcode 0xdd !11/3.  (Comment previously said !11/0; the 0xdd decoder
 * dispatches this from reg field 3.)
 * FSTP m64r - same as FST m64r but pops the register stack afterwards
 * (note the *_THEN_POP variants of the FSW-update/underflow macros). */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,    1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,  2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12172
12173
/** Opcode 0xdd !11/4.  (Comment previously said !11/0; the 0xdd decoder
 * dispatches this from reg field 4.)
 * FRSTOR m94/108byte - restores the whole FPU state from memory; deferred
 * to the C implementation (layout depends on the effective operand size). */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG_CONST(uint8_t,           iEffSeg,      /*=*/ pVCpu->iem.s.iEffSeg,       1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
12191
12192
/** Opcode 0xdd !11/6.  (Comment previously said !11/0; the 0xdd decoder
 * dispatches this from reg field 6.)
 * FNSAVE m94/108byte - saves the whole FPU state to memory; deferred to the
 * C implementation (layout depends on the effective operand size). */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */

    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG_CONST(uint8_t,           iEffSeg,      /*=*/ pVCpu->iem.s.iEffSeg,       1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
12210
/** Opcode 0xdd !11/7.  (Comment previously said !11/0; the 0xdd decoder
 * dispatches this from reg field 7.)
 * FNSTSW m16 - stores the FPU status word to a 16-bit memory operand;
 * no-wait form, so no pending-exception check is done. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2, 0, 0);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 *        from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 *        NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
12234
12235
/** Opcode 0xdd 11/0.
 * FFREE stN - marks register ST(i) as empty in the tag word; the register
 * content itself is not cleared and the stack top is not moved. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
             unmodified. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12255
12256
/** Opcode 0xdd 11/2.  (Comment previously said 11/1; the 0xdd decoder
 * dispatches this from reg field 2.)
 * FST st0,stN - copies ST(0) into ST(i) via a zero-FSW FPU result; records
 * a stack underflow against ST(i) when ST(0) is empty. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12279
12280
/** Opcode 0xdd 11/4.  (Comment previously said 11/3; the 0xdd decoder
 * dispatches this from reg field 4.)
 * FUCOM st0,stN - unordered compare via the common no-store worker. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
12287
12288
/** Opcode 0xdd 11/5.  (Comment previously said 11/4; the 0xdd decoder
 * dispatches this from reg field 5.)
 * FUCOMP st0,stN - unordered compare via the common no-store worker,
 * popping the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
12295
12296
/**
 * @opcode 0xdd
 *
 * Escape opcode 0xdd decoder: register-form encodings cover FFREE, FST(P)
 * and FUCOM(P); memory-form encodings cover the m64 real/integer operations
 * plus FRSTOR/FNSAVE/FNSTSW.
 */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of 0xdd + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12335
12336
/** Opcode 0xde 11/0.
 * FADDP stN,st0 - add-and-pop via the common ST(i),ST(0) pop worker. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
12343
12344
/** Opcode 0xde 11/1.  (Comment previously said 11/0, copy-pasted from
 * FADDP; FMULP is reg field 1.)
 * FMULP stN,st0 - multiply-and-pop via the common ST(i),ST(0) pop worker. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
12351
12352
/** Opcode 0xde 0xd9.
 * FCOMPP - compares ST(0) with ST(1) and pops both, via the common
 * no-store double-pop worker. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
12359
12360
/** Opcode 0xde 11/4.
 * FSUBRP stN,st0 - reversed-subtract-and-pop via the common ST(i),ST(0)
 * pop worker. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
12367
12368
/** Opcode 0xde 11/5.
 * FSUBP stN,st0 - subtract-and-pop via the common ST(i),ST(0) pop worker. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
12375
12376
/** Opcode 0xde 11/6.
 * FDIVRP stN,st0 - reversed-divide-and-pop via the common ST(i),ST(0)
 * pop worker. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
12383
12384
/** Opcode 0xde 11/7.
 * FDIVP stN,st0 - divide-and-pop via the common ST(i),ST(0) pop worker. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
12391
12392
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * The 16-bit integer operand is fetched into a local before the FPU usage
 * is prepared; an empty ST(0) is reported as a stack underflow instead of
 * calling the assembly helper.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,              FpuRes);
    IEM_MC_LOCAL(int16_t,                   i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,     pFpuRes,    FpuRes,  0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,   pi16Val2,   i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12428
12429
/** Opcode 0xde !11/0.
 * FIADD m16i - dispatches to the common ST(0)/m16i worker with the
 * fiadd-by-i16 assembly helper; the sum is stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
12436
12437
/** Opcode 0xde !11/1.
 * FIMUL m16i - dispatches to the common ST(0)/m16i worker with the
 * fimul-by-i16 assembly helper; the product is stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
12444
12445
/** Opcode 0xde !11/2.
 * FICOM st0,m16i - compares ST(0) against a 16-bit integer memory operand;
 * the comparison result is delivered via FSW condition-code bits, nothing
 * is written back. Underflow is reported without a destination (UINT8_MAX). */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,                  u16Fsw);
    IEM_MC_LOCAL(int16_t,                   i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,        pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,   pi16Val2,   i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12477
12478
12479/** Opcode 0xde !11/3. */
12480FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
12481{
12482 IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");
12483
12484 IEM_MC_BEGIN(3, 3, 0, 0);
12485 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12486 IEM_MC_LOCAL(uint16_t, u16Fsw);
12487 IEM_MC_LOCAL(int16_t, i16Val2);
12488 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12489 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12490 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
12491
12492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12494
12495 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12496 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12497 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12498
12499 IEM_MC_PREPARE_FPU_USAGE();
12500 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
12501 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
12502 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12503 } IEM_MC_ELSE() {
12504 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12505 } IEM_MC_ENDIF();
12506 IEM_MC_ADVANCE_RIP_AND_FINISH();
12507
12508 IEM_MC_END();
12509}
12510
12511
12512/** Opcode 0xde !11/4. */
12513FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
12514{
12515 IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
12516 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
12517}
12518
12519
12520/** Opcode 0xde !11/5. */
12521FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
12522{
12523 IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
12524 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
12525}
12526
12527
12528/** Opcode 0xde !11/6. */
12529FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
12530{
12531 IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
12532 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
12533}
12534
12535
12536/** Opcode 0xde !11/7. */
12537FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
12538{
12539 IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
12540 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
12541}
12542
12543
12544/**
12545 * @opcode 0xde
12546 */
12547FNIEMOP_DEF(iemOp_EscF6)
12548{
12549 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12550 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
12551 if (IEM_IS_MODRM_REG_MODE(bRm))
12552 {
12553 switch (IEM_GET_MODRM_REG_8(bRm))
12554 {
12555 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
12556 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
12557 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
12558 case 3: if (bRm == 0xd9)
12559 return FNIEMOP_CALL(iemOp_fcompp);
12560 IEMOP_RAISE_INVALID_OPCODE_RET();
12561 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
12562 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
12563 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
12564 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
12565 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12566 }
12567 }
12568 else
12569 {
12570 switch (IEM_GET_MODRM_REG_8(bRm))
12571 {
12572 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
12573 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
12574 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
12575 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
12576 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
12577 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
12578 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
12579 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
12580 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12581 }
12582 }
12583}
12584
12585
/** Opcode 0xdf 11/0.
 * Undocument instruction, assumed to work like ffree + fincstp.
 * Marks ST(i) as empty in the tag word and then increments TOP (pop without
 * storing), updating FOP/FIP like other x87 instructions. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* ffree part: tag ST(i) empty; then the fincstp part pops the stack. */
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12605
12606
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: copies the FPU status word into AX without checking for pending
 * FPU exceptions (the no-wait form, hence no IEM_MC_MAYBE_RAISE_FPU_XCPT). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12621
12622
12623/** Opcode 0xdf 11/5. */
12624FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
12625{
12626 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
12627 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
12628 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
12629 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
12630}
12631
12632
/** Opcode 0xdf 11/6.
 * FCOMIP ST(0),ST(i): *ordered* compare of ST(0) with ST(i) into EFLAGS
 * (ZF/PF/CF), then pop ST(0).  fUCmp=false selects the ordered semantics in
 * the shared worker; bit 31 of the last argument requests the pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
12641
12642
/** Opcode 0xdf !11/0.
 * FILD m16i: loads a signed 16-bit memory integer, converts it to 80-bit
 * real and pushes it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST(7) (relative to TOP) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Destination slot occupied: stack overflow. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12673
12674
/** Opcode 0xdf !11/1.
 * FISTTP m16i (SSE3): stores ST(0) to a signed 16-bit memory integer using
 * truncation (round toward zero) regardless of FCW.RC, then pops ST(0).
 * On an empty ST(0) with IM masked, the integer-indefinite value is stored. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination write-only before knowing whether ST(0) is valid,
       so memory faults are raised before any FPU state changes. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        /* fistt = truncating store worker. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: masked IM stores the integer indefinite, unmasked
           rolls the mapping back; underflow is signalled either way. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12712
12713
/** Opcode 0xdf !11/2.
 * FIST m16i: stores ST(0) to a signed 16-bit memory integer using the
 * rounding mode from FCW.RC.  Does not pop the stack. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map destination first so memory faults precede FPU state changes. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        /* No pop for FIST (contrast with FISTP below). */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: masked IM stores the integer indefinite, otherwise
           roll the mapping back; stack underflow without pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12751
12752
/** Opcode 0xdf !11/3.
 * FISTP m16i: stores ST(0) to a signed 16-bit memory integer using FCW.RC
 * rounding, then pops ST(0) (the _THEN_POP variants). */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map destination first so memory faults precede FPU state changes. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        /* Same store worker as FIST; the pop is done by the FSW update. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: masked IM stores the integer indefinite, otherwise
           roll back; underflow with pop either way. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12790
12791
/** Opcode 0xdf !11/4.
 * FBLD m80bcd: loads an 80-bit packed BCD value from memory, converts it to
 * 80-bit real and pushes it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST(7) (relative to TOP) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12822
12823
/** Opcode 0xdf !11/5.
 * FILD m64i: loads a signed 64-bit memory integer, converts it to 80-bit
 * real and pushes it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST(7) (relative to TOP) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12854
12855
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd: stores ST(0) as an 80-bit packed BCD value in memory, then
 * pops ST(0).  On empty ST(0) with IM masked, a BCD indefinite is stored. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map destination first so memory faults precede FPU state changes. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_MEM_MAP_D80_WO(pd80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: masked IM stores the BCD indefinite, otherwise roll
           back; underflow with pop either way. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12893
12894
/** Opcode 0xdf !11/7.
 * FISTP m64i: stores ST(0) to a signed 64-bit memory integer using FCW.RC
 * rounding, then pops ST(0). */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map destination first so memory faults precede FPU state changes. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: masked IM stores the 64-bit integer indefinite,
           otherwise roll back; underflow with pop either way. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12932
12933
12934/**
12935 * @opcode 0xdf
12936 */
12937FNIEMOP_DEF(iemOp_EscF7)
12938{
12939 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12940 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
12941 if (IEM_IS_MODRM_REG_MODE(bRm))
12942 {
12943 switch (IEM_GET_MODRM_REG_8(bRm))
12944 {
12945 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
12946 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
12947 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
12948 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
12949 case 4: if (bRm == 0xe0)
12950 return FNIEMOP_CALL(iemOp_fnstsw_ax);
12951 IEMOP_RAISE_INVALID_OPCODE_RET();
12952 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
12953 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
12954 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
12955 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12956 }
12957 }
12958 else
12959 {
12960 switch (IEM_GET_MODRM_REG_8(bRm))
12961 {
12962 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
12963 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
12964 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
12965 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
12966 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
12967 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
12968 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
12969 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
12970 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12971 }
12972 }
12973}
12974
12975
12976/**
12977 * @opcode 0xe0
12978 * @opfltest zf
12979 */
12980FNIEMOP_DEF(iemOp_loopne_Jb)
12981{
12982 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
12983 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12984 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12985
12986 switch (pVCpu->iem.s.enmEffAddrMode)
12987 {
12988 case IEMMODE_16BIT:
12989 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12991 IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12992 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
12993 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12994 } IEM_MC_ELSE() {
12995 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
12996 IEM_MC_ADVANCE_RIP_AND_FINISH();
12997 } IEM_MC_ENDIF();
12998 IEM_MC_END();
12999 break;
13000
13001 case IEMMODE_32BIT:
13002 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13004 IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
13005 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13006 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13007 } IEM_MC_ELSE() {
13008 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13009 IEM_MC_ADVANCE_RIP_AND_FINISH();
13010 } IEM_MC_ENDIF();
13011 IEM_MC_END();
13012 break;
13013
13014 case IEMMODE_64BIT:
13015 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
13016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13017 IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
13018 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13019 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13020 } IEM_MC_ELSE() {
13021 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13022 IEM_MC_ADVANCE_RIP_AND_FINISH();
13023 } IEM_MC_ENDIF();
13024 IEM_MC_END();
13025 break;
13026
13027 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13028 }
13029}
13030
13031
13032/**
13033 * @opcode 0xe1
13034 * @opfltest zf
13035 */
13036FNIEMOP_DEF(iemOp_loope_Jb)
13037{
13038 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
13039 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13040 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13041
13042 switch (pVCpu->iem.s.enmEffAddrMode)
13043 {
13044 case IEMMODE_16BIT:
13045 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
13046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13047 IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
13048 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13049 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13050 } IEM_MC_ELSE() {
13051 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13052 IEM_MC_ADVANCE_RIP_AND_FINISH();
13053 } IEM_MC_ENDIF();
13054 IEM_MC_END();
13055 break;
13056
13057 case IEMMODE_32BIT:
13058 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13060 IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
13061 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13062 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13063 } IEM_MC_ELSE() {
13064 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13065 IEM_MC_ADVANCE_RIP_AND_FINISH();
13066 } IEM_MC_ENDIF();
13067 IEM_MC_END();
13068 break;
13069
13070 case IEMMODE_64BIT:
13071 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
13072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13073 IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
13074 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13075 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13076 } IEM_MC_ELSE() {
13077 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13078 IEM_MC_ADVANCE_RIP_AND_FINISH();
13079 } IEM_MC_ENDIF();
13080 IEM_MC_END();
13081 break;
13082
13083 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13084 }
13085}
13086
13087
13088/**
13089 * @opcode 0xe2
13090 */
13091FNIEMOP_DEF(iemOp_loop_Jb)
13092{
13093 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
13094 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13095 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13096
13097 /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
13098 * using the 32-bit operand size override. How can that be restarted? See
13099 * weird pseudo code in intel manual. */
13100
13101 /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
13102 * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
13103 * the loop causes guest crashes, but when logging it's nice to skip a few million
13104 * lines of useless output. */
13105#if defined(LOG_ENABLED)
13106 if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
13107 switch (pVCpu->iem.s.enmEffAddrMode)
13108 {
13109 case IEMMODE_16BIT:
13110 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
13111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13112 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
13113 IEM_MC_ADVANCE_RIP_AND_FINISH();
13114 IEM_MC_END();
13115 break;
13116
13117 case IEMMODE_32BIT:
13118 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13120 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
13121 IEM_MC_ADVANCE_RIP_AND_FINISH();
13122 IEM_MC_END();
13123 break;
13124
13125 case IEMMODE_64BIT:
13126 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
13127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13128 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
13129 IEM_MC_ADVANCE_RIP_AND_FINISH();
13130 IEM_MC_END();
13131 break;
13132
13133 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13134 }
13135#endif
13136
13137 switch (pVCpu->iem.s.enmEffAddrMode)
13138 {
13139 case IEMMODE_16BIT:
13140 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
13141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13142 IEM_MC_IF_CX_IS_NOT_ONE() {
13143 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13144 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13145 } IEM_MC_ELSE() {
13146 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
13147 IEM_MC_ADVANCE_RIP_AND_FINISH();
13148 } IEM_MC_ENDIF();
13149 IEM_MC_END();
13150 break;
13151
13152 case IEMMODE_32BIT:
13153 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13155 IEM_MC_IF_ECX_IS_NOT_ONE() {
13156 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13157 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13158 } IEM_MC_ELSE() {
13159 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
13160 IEM_MC_ADVANCE_RIP_AND_FINISH();
13161 } IEM_MC_ENDIF();
13162 IEM_MC_END();
13163 break;
13164
13165 case IEMMODE_64BIT:
13166 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
13167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13168 IEM_MC_IF_RCX_IS_NOT_ONE() {
13169 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13170 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13171 } IEM_MC_ELSE() {
13172 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
13173 IEM_MC_ADVANCE_RIP_AND_FINISH();
13174 } IEM_MC_ENDIF();
13175 IEM_MC_END();
13176 break;
13177
13178 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13179 }
13180}
13181
13182
13183/**
13184 * @opcode 0xe3
13185 */
13186FNIEMOP_DEF(iemOp_jecxz_Jb)
13187{
13188 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
13189 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13190 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13191
13192 switch (pVCpu->iem.s.enmEffAddrMode)
13193 {
13194 case IEMMODE_16BIT:
13195 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
13196 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13197 IEM_MC_IF_CX_IS_NZ() {
13198 IEM_MC_ADVANCE_RIP_AND_FINISH();
13199 } IEM_MC_ELSE() {
13200 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13201 } IEM_MC_ENDIF();
13202 IEM_MC_END();
13203 break;
13204
13205 case IEMMODE_32BIT:
13206 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13208 IEM_MC_IF_ECX_IS_NZ() {
13209 IEM_MC_ADVANCE_RIP_AND_FINISH();
13210 } IEM_MC_ELSE() {
13211 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13212 } IEM_MC_ENDIF();
13213 IEM_MC_END();
13214 break;
13215
13216 case IEMMODE_64BIT:
13217 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
13218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13219 IEM_MC_IF_RCX_IS_NZ() {
13220 IEM_MC_ADVANCE_RIP_AND_FINISH();
13221 } IEM_MC_ELSE() {
13222 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13223 } IEM_MC_ENDIF();
13224 IEM_MC_END();
13225 break;
13226
13227 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13228 }
13229}
13230
13231
13232/**
13233 * @opcode 0xe4
13234 * @opfltest iopl
13235 */
13236FNIEMOP_DEF(iemOp_in_AL_Ib)
13237{
13238 IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
13239 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13241 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13242 iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13243}
13244
13245
13246/**
13247 * @opcode 0xe5
13248 * @opfltest iopl
13249 */
13250FNIEMOP_DEF(iemOp_in_eAX_Ib)
13251{
13252 IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
13253 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13255 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13256 iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
13257 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13258}
13259
13260
13261/**
13262 * @opcode 0xe6
13263 * @opfltest iopl
13264 */
13265FNIEMOP_DEF(iemOp_out_Ib_AL)
13266{
13267 IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
13268 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13270 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
13271 iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13272}
13273
13274
13275/**
13276 * @opcode 0xe7
13277 * @opfltest iopl
13278 */
13279FNIEMOP_DEF(iemOp_out_Ib_eAX)
13280{
13281 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
13282 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13284 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
13285 iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
13286 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13287}
13288
13289
13290/**
13291 * @opcode 0xe8
13292 */
13293FNIEMOP_DEF(iemOp_call_Jv)
13294{
13295 IEMOP_MNEMONIC(call_Jv, "call Jv");
13296 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13297 switch (pVCpu->iem.s.enmEffOpSize)
13298 {
13299 case IEMMODE_16BIT:
13300 {
13301 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13302 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
13303 iemCImpl_call_rel_16, (int16_t)u16Imm);
13304 }
13305
13306 case IEMMODE_32BIT:
13307 {
13308 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13309 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
13310 iemCImpl_call_rel_32, (int32_t)u32Imm);
13311 }
13312
13313 case IEMMODE_64BIT:
13314 {
13315 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
13316 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
13317 iemCImpl_call_rel_64, u64Imm);
13318 }
13319
13320 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13321 }
13322}
13323
13324
13325/**
13326 * @opcode 0xe9
13327 */
13328FNIEMOP_DEF(iemOp_jmp_Jv)
13329{
13330 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
13331 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13332 switch (pVCpu->iem.s.enmEffOpSize)
13333 {
13334 case IEMMODE_16BIT:
13335 IEM_MC_BEGIN(0, 0, 0, 0);
13336 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
13337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13338 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
13339 IEM_MC_END();
13340 break;
13341
13342 case IEMMODE_64BIT:
13343 case IEMMODE_32BIT:
13344 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13345 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
13346 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13347 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
13348 IEM_MC_END();
13349 break;
13350
13351 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13352 }
13353}
13354
13355
13356/**
13357 * @opcode 0xea
13358 */
13359FNIEMOP_DEF(iemOp_jmp_Ap)
13360{
13361 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
13362 IEMOP_HLP_NO_64BIT();
13363
13364 /* Decode the far pointer address and pass it on to the far call C implementation. */
13365 uint32_t off32Seg;
13366 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
13367 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
13368 else
13369 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
13370 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
13371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13372 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
13373 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
13374 iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
13375 /** @todo make task-switches, ring-switches, ++ return non-zero status */
13376}
13377
13378
13379/**
13380 * @opcode 0xeb
13381 */
13382FNIEMOP_DEF(iemOp_jmp_Jb)
13383{
13384 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
13385 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13386 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13387
13388 IEM_MC_BEGIN(0, 0, 0, 0);
13389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13390 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13391 IEM_MC_END();
13392}
13393
13394
13395/**
13396 * @opcode 0xec
13397 * @opfltest iopl
13398 */
13399FNIEMOP_DEF(iemOp_in_AL_DX)
13400{
13401 IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
13402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13403 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
13404 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13405 iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
13406}
13407
13408
/**
 * @opcode 0xed
 * @opfltest iopl
 *
 * Word/dword input from the port in DX into AX/EAX (by effective operand size).
 */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Access size: 2 bytes for 16-bit operand size, otherwise 4. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
13422
13423
/**
 * @opcode 0xee
 * @opfltest iopl
 *
 * Byte output of AL to the port in DX.
 */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to iemCImpl_out_DX_eAX with access size 1; no guest registers
       are written (liveness mask 0). */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
13435
13436
/**
 * @opcode 0xef
 * @opfltest iopl
 *
 * Word/dword output of AX/EAX to the port in DX (by effective operand size).
 */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Access size: 2 bytes for 16-bit operand size, otherwise 4. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
13449
13450
/**
 * @opcode 0xf0
 *
 * LOCK prefix: records IEM_OP_PRF_LOCK and re-dispatches on the next opcode
 * byte via the one-byte opcode map.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
13462
13463
/**
 * @opcode 0xf1
 *
 * INT1/ICEBP: raises \#DB via the common software-interrupt implementation.
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    /** @todo Does not generate \#UD on 286, or so they say...  Was allegedly a
     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
     * LOADALL memo.  Needs some testing. */
    IEMOP_HLP_MIN_386();
    /** @todo testcase! */
    /* Deferred to iemCImpl_int with vector X86_XCPT_DB; flagged as an indirect
       far branch that switches stacks, may change mode/RFLAGS, can VM-exit,
       and ends the current translation block. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
}
13479
13480
/**
 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix: records IEM_OP_PRF_REPNZ (clearing any earlier REPZ) and
 * re-dispatches on the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
13498
13499
/**
 * @opcode 0xf3
 *
 * REP/REPE/REPZ prefix: records IEM_OP_PRF_REPZ (clearing any earlier REPNZ)
 * and re-dispatches on the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
13517
13518
/**
 * @opcode 0xf4
 *
 * HLT: deferred to iemCImpl_hlt; always ends the translation block and may
 * trigger a VM exit.
 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_MNEMONIC(hlt, "hlt");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_hlt);
}
13528
13529
/**
 * @opcode 0xf5
 * @opflmodify cf
 *
 * CMC: complements EFLAGS.CF in-line.
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13543
13544
/**
 * Body of 'inc/dec/not/neg Eb'.
 *
 * Register operands call a_fnNormalU8 directly on the referenced GREG.  Memory
 * operands are mapped read/write; when a LOCK prefix is active (and not
 * disregarded via IEM_F_X86_DISREGARD_LOCK) the mapping is atomic and the
 * locked worker a_fnLockedU8 is used instead.
 */
#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        /* register access */ \
        IEM_MC_BEGIN(2, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG(uint32_t *, pEFlags, 1); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(2, 2, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(2, 2, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
13605
13606
/**
 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
 *
 * Handles the register case and the unlocked memory case for all three
 * effective operand sizes.  NOTE: this macro deliberately ends inside an open
 * 'else {' — it must always be followed by IEMOP_BODY_UNARY_Ev_LOCKED, which
 * supplies the locked memory variant and closes the braces.
 */
#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(2, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
13730
/**
 * Locked-memory continuation of IEMOP_BODY_UNARY_Ev.
 *
 * Supplies the atomic (LOCK-prefixed) memory variants for all three operand
 * sizes and closes the braces left open by IEMOP_BODY_UNARY_Ev; the two macros
 * must always be used as a pair.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
13796
13797
/**
 * @opmaps grp3_f6
 * @opcode /0
 * @opflclass logical
 * @todo also /1
 *
 * 'test Eb,Ib' — ANDs the destination with the immediate to set flags only;
 * the destination is never written (memory is mapped read-only).
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(3, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 3, 0, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 trailing immediate byte after the ModR/M bytes */

        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_LOCAL(uint8_t, bUnmapInfo);
        IEM_MC_ARG(uint8_t const *, pu8Dst, 0);
        IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

        IEM_MC_ARG_CONST(uint8_t, u8Src, u8Imm, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
13849
13850
/* Body for opcode 0xf6 variations /4, /5, /6 and /7 (mul/imul/div/idiv Eb).
   The worker operates on AX (implicit accumulator) and the 8-bit operand; a
   non-zero return code from the worker raises \#DE (divide error). */
#define IEMOP_GRP3_MUL_DIV_EB(bRm, a_pfnU8Expr) \
    PFNIEMAIMPLMULDIVU8 const pfnU8 = (a_pfnU8Expr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register access */ \
        IEM_MC_BEGIN(3, 1, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint16_t *, pu16AX, 0); \
        IEM_MC_ARG(uint8_t, u8Value, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(int32_t, rc); \
        \
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags); \
        IEM_MC_IF_LOCAL_IS_Z(rc) { \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        } IEM_MC_ELSE() { \
            IEM_MC_RAISE_DIVIDE_ERROR(); \
        } IEM_MC_ENDIF(); \
        \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        IEM_MC_BEGIN(3, 2, 0, 0); \
        IEM_MC_ARG(uint16_t *, pu16AX, 0); \
        IEM_MC_ARG(uint8_t, u8Value, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_LOCAL(int32_t, rc); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags); \
        IEM_MC_IF_LOCAL_IS_Z(rc) { \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        } IEM_MC_ELSE() { \
            IEM_MC_RAISE_DIVIDE_ERROR(); \
        } IEM_MC_ENDIF(); \
        \
        IEM_MC_END(); \
    } (void)0
13900
13901
/* Body for opcode 0xf7 variant /4, /5, /6 and /7 (mul/imul/div/idiv Ev).
   The worker operates on the AX:DX / EAX:EDX / RAX:RDX pair and the operand;
   a non-zero return code raises \#DE.  In the 32-bit success path the high
   dwords of RAX/RDX are cleared explicitly. */
#define IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, a_pImplExpr) \
    PCIEMOPMULDIVSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register access */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(4, 1, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16AX, 0); \
                IEM_MC_ARG(uint16_t *, pu16DX, 1); \
                IEM_MC_ARG(uint16_t, u16Value, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                IEM_MC_LOCAL(int32_t, rc); \
                \
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(4, 1, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32AX, 0); \
                IEM_MC_ARG(uint32_t *, pu32DX, 1); \
                IEM_MC_ARG(uint32_t, u32Value, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                IEM_MC_LOCAL(int32_t, rc); \
                \
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64AX, 0); \
                IEM_MC_ARG(uint64_t *, pu64DX, 1); \
                IEM_MC_ARG(uint64_t, u64Value, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                IEM_MC_LOCAL(int32_t, rc); \
                \
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory access. */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(4, 2, 0, 0); \
                IEM_MC_ARG(uint16_t *, pu16AX, 0); \
                IEM_MC_ARG(uint16_t *, pu16DX, 1); \
                IEM_MC_ARG(uint16_t, u16Value, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(int32_t, rc); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(4, 2, IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *, pu32AX, 0); \
                IEM_MC_ARG(uint32_t *, pu32DX, 1); \
                IEM_MC_ARG(uint32_t, u32Value, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(int32_t, rc); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(4, 2, IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *, pu64AX, 0); \
                IEM_MC_ARG(uint64_t *, pu64DX, 1); \
                IEM_MC_ARG(uint64_t, u64Value, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(int32_t, rc); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
14069
14070
/**
 * @opmaps grp3_f6
 * @opcode /2
 * @opflclass unchanged
 *
 * 'not Eb' — bitwise complement of the 8-bit operand.
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
{
/** @todo does not modify EFLAGS. */
    IEMOP_MNEMONIC(not_Eb, "not Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
}
14082
14083
14084/**
14085 * @opmaps grp3_f6
14086 * @opcode /3
14087 * @opflclass arithmetic
14088 */
14089FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
14090{
14091 IEMOP_MNEMONIC(net_Eb, "neg Eb");
14092 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
14093}
14094
14095
/**
 * @opcode 0xf6
 *
 * Group 3 byte-operand dispatcher: routes on ModR/M reg field to
 * test/not/neg workers or to the inline mul/imul/div/idiv bodies.
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
            /* /1 is handled as an alias of /0 (test) here. */
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 2:
            return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
        case 4:
        {
            /**
             * @opdone
             * @opmaps grp3_f6
             * @opcode /4
             * @opflclass multiply
             */
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
            break;
        }
        case 5:
        {
            /**
             * @opdone
             * @opmaps grp3_f6
             * @opcode /5
             * @opflclass multiply
             */
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
            break;
        }
        case 6:
        {
            /**
             * @opdone
             * @opmaps grp3_f6
             * @opcode /6
             * @opflclass division
             */
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
            break;
        }
        case 7:
        {
            /**
             * @opdone
             * @opmaps grp3_f6
             * @opcode /7
             * @opflclass division
             */
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
            break;
        }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14167
14168
/**
 * @opmaps grp3_f7
 * @opcode /0
 * @opflclass logical
 *
 * 'test Ev,Iv' — flags-only AND with an immediate; the destination is never
 * written (memory is mapped read-only).  The 64-bit form uses a sign-extended
 * 32-bit immediate.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 trailing immediate bytes */

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint16_t const *, pu16Dst, 0);
                IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 trailing immediate bytes */

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint32_t const *, pu32Dst, 0);
                IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* imm32 even with 64-bit operand size */

                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_ARG(uint64_t const *, pu64Dst, 0);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14308
14309
/**
 * @opmaps grp3_f7
 * @opcode /2
 * @opflclass unchanged
 *
 * 'not Ev' — bitwise complement of the word/dword/qword operand.
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
/** @todo does not modify EFLAGS */
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_not_u16,        iemAImpl_not_u32,        iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
14322
14323
/**
 * @opmaps grp3_f7
 * @opcode /3
 * @opflclass arithmetic
 *
 * 'neg Ev' — two's complement negation of the word/dword/qword operand.
 */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_neg_u16,        iemAImpl_neg_u32,        iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
14335
14336
/**
 * @opmaps grp3_f7
 * @opcode /4
 * @opflclass multiply
 *
 * 'mul Ev' — unsigned multiply into the xAX:xDX pair.
 */
FNIEMOP_DEF_1(iemOp_grp3_mul_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(mul_Ev, "mul Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
}
14348
14349
/**
 * @opmaps grp3_f7
 * @opcode /5
 * @opflclass multiply
 *
 * 'imul Ev' — signed multiply into the xAX:xDX pair.
 */
FNIEMOP_DEF_1(iemOp_grp3_imul_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(imul_Ev, "imul Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
}
14361
14362
/**
 * @opmaps grp3_f7
 * @opcode /6
 * @opflclass division
 *
 * 'div Ev' — unsigned divide of the xAX:xDX pair; may raise \#DE.
 */
FNIEMOP_DEF_1(iemOp_grp3_div_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(div_Ev, "div Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
    IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
}
14374
14375
14376/**
14377 * @opmaps grp3_f7
14378 * @opcode /7
14379 * @opflclass division
14380 */
14381FNIEMOP_DEF_1(iemOp_grp3_idiv_Ev, uint8_t, bRm)
14382{
14383 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
14384 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14385 IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
14386}
14387
14388
14389/**
14390 * @opcode 0xf7
14391 */
14392FNIEMOP_DEF(iemOp_Grp3_Ev)
14393{
14394 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14395 switch (IEM_GET_MODRM_REG_8(bRm))
14396 {
14397 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
14398 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
14399 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
14400 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
14401 case 4: return FNIEMOP_CALL_1(iemOp_grp3_mul_Ev, bRm);
14402 case 5: return FNIEMOP_CALL_1(iemOp_grp3_imul_Ev, bRm);
14403 case 6: return FNIEMOP_CALL_1(iemOp_grp3_div_Ev, bRm);
14404 case 7: return FNIEMOP_CALL_1(iemOp_grp3_idiv_Ev, bRm);
14405 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14406 }
14407}
14408
14409
14410/**
14411 * @opcode 0xf8
14412 * @opflmodify cf
14413 * @opflclear cf
14414 */
14415FNIEMOP_DEF(iemOp_clc)
14416{
14417 IEMOP_MNEMONIC(clc, "clc");
14418 IEM_MC_BEGIN(0, 0, 0, 0);
14419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14420 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
14421 IEM_MC_ADVANCE_RIP_AND_FINISH();
14422 IEM_MC_END();
14423}
14424
14425
14426/**
14427 * @opcode 0xf9
14428 * @opflmodify cf
14429 * @opflset cf
14430 */
14431FNIEMOP_DEF(iemOp_stc)
14432{
14433 IEMOP_MNEMONIC(stc, "stc");
14434 IEM_MC_BEGIN(0, 0, 0, 0);
14435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14436 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
14437 IEM_MC_ADVANCE_RIP_AND_FINISH();
14438 IEM_MC_END();
14439}
14440
14441
14442/**
14443 * @opcode 0xfa
14444 * @opfltest iopl,vm
14445 * @opflmodify if,vif
14446 */
14447FNIEMOP_DEF(iemOp_cli)
14448{
14449 IEMOP_MNEMONIC(cli, "cli");
14450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14451 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, 0, iemCImpl_cli);
14452}
14453
14454
14455/**
14456 * @opcode 0xfb
14457 * @opfltest iopl,vm
14458 * @opflmodify if,vif
14459 */
14460FNIEMOP_DEF(iemOp_sti)
14461{
14462 IEMOP_MNEMONIC(sti, "sti");
14463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14464 IEM_MC_DEFER_TO_CIMPL_0_RET( IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
14465 | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, 0, iemCImpl_sti);
14466}
14467
14468
14469/**
14470 * @opcode 0xfc
14471 * @opflmodify df
14472 * @opflclear df
14473 */
14474FNIEMOP_DEF(iemOp_cld)
14475{
14476 IEMOP_MNEMONIC(cld, "cld");
14477 IEM_MC_BEGIN(0, 0, 0, 0);
14478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14479 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
14480 IEM_MC_ADVANCE_RIP_AND_FINISH();
14481 IEM_MC_END();
14482}
14483
14484
14485/**
14486 * @opcode 0xfd
14487 * @opflmodify df
14488 * @opflset df
14489 */
14490FNIEMOP_DEF(iemOp_std)
14491{
14492 IEMOP_MNEMONIC(std, "std");
14493 IEM_MC_BEGIN(0, 0, 0, 0);
14494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14495 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
14496 IEM_MC_ADVANCE_RIP_AND_FINISH();
14497 IEM_MC_END();
14498}
14499
14500
14501/**
14502 * @opmaps grp4
14503 * @opcode /0
14504 * @opflclass incdec
14505 */
14506FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
14507{
14508 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
14509 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
14510}
14511
14512
14513/**
14514 * @opmaps grp4
14515 * @opcode /1
14516 * @opflclass incdec
14517 */
14518FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
14519{
14520 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
14521 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
14522}
14523
14524
14525/**
14526 * @opcode 0xfe
14527 */
14528FNIEMOP_DEF(iemOp_Grp4)
14529{
14530 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14531 switch (IEM_GET_MODRM_REG_8(bRm))
14532 {
14533 case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
14534 case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
14535 default:
14536 /** @todo is the eff-addr decoded? */
14537 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
14538 IEMOP_RAISE_INVALID_OPCODE_RET();
14539 }
14540}
14541
14542/**
14543 * @opmaps grp5
14544 * @opcode /0
14545 * @opflclass incdec
14546 */
14547FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
14548{
14549 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
14550 IEMOP_BODY_UNARY_Ev( iemAImpl_inc_u16, iemAImpl_inc_u32, iemAImpl_inc_u64);
14551 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
14552}
14553
14554
14555/**
14556 * @opmaps grp5
14557 * @opcode /1
14558 * @opflclass incdec
14559 */
14560FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
14561{
14562 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
14563 IEMOP_BODY_UNARY_Ev( iemAImpl_dec_u16, iemAImpl_dec_u32, iemAImpl_dec_u64);
14564 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
14565}
14566
14567
14568/**
14569 * Opcode 0xff /2.
14570 * @param bRm The RM byte.
14571 */
14572FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
14573{
14574 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
14575 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
14576
14577 if (IEM_IS_MODRM_REG_MODE(bRm))
14578 {
14579 /* The new RIP is taken from a register. */
14580 switch (pVCpu->iem.s.enmEffOpSize)
14581 {
14582 case IEMMODE_16BIT:
14583 IEM_MC_BEGIN(1, 0, 0, 0);
14584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14585 IEM_MC_ARG(uint16_t, u16Target, 0);
14586 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14587 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
14588 IEM_MC_END();
14589 break;
14590
14591 case IEMMODE_32BIT:
14592 IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
14593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14594 IEM_MC_ARG(uint32_t, u32Target, 0);
14595 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14596 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
14597 IEM_MC_END();
14598 break;
14599
14600 case IEMMODE_64BIT:
14601 IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
14602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14603 IEM_MC_ARG(uint64_t, u64Target, 0);
14604 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14605 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
14606 IEM_MC_END();
14607 break;
14608
14609 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14610 }
14611 }
14612 else
14613 {
14614 /* The new RIP is taken from a register. */
14615 switch (pVCpu->iem.s.enmEffOpSize)
14616 {
14617 case IEMMODE_16BIT:
14618 IEM_MC_BEGIN(1, 1, 0, 0);
14619 IEM_MC_ARG(uint16_t, u16Target, 0);
14620 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14621 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14623 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14624 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
14625 IEM_MC_END();
14626 break;
14627
14628 case IEMMODE_32BIT:
14629 IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
14630 IEM_MC_ARG(uint32_t, u32Target, 0);
14631 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14632 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14634 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14635 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
14636 IEM_MC_END();
14637 break;
14638
14639 case IEMMODE_64BIT:
14640 IEM_MC_BEGIN(1, 1, IEM_MC_F_64BIT, 0);
14641 IEM_MC_ARG(uint64_t, u64Target, 0);
14642 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14643 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14645 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14646 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
14647 IEM_MC_END();
14648 break;
14649
14650 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14651 }
14652 }
14653}
14654
/**
 * Common body for the far branch forms of group 5 (0xff /3 callf and /5 jmpf)
 * which load a far pointer (selector:offset) from memory.
 *
 * Register operands are invalid (\#UD).  In 64-bit mode the default operand
 * size is 32-bit; a REX.W prefix is only honoured on Intel CPUs.
 *
 * @param   a_bRm           The ModR/M byte (must select a memory operand).
 * @param   a_fnCImpl       The C implementation performing the far branch.
 * @param   a_fCImplExtra   Additional IEM_CIMPL_F_XXX flags for the CIMPL call.
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl, a_fCImplExtra) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1, 0, 0); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint16_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_16BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint32_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_32BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint64_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_64BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE /* no gates */, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
14723
14724
14725/**
14726 * Opcode 0xff /3.
14727 * @param bRm The RM byte.
14728 */
14729FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
14730{
14731 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
14732 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf, IEM_CIMPL_F_BRANCH_STACK);
14733}
14734
14735
14736/**
14737 * Opcode 0xff /4.
14738 * @param bRm The RM byte.
14739 */
14740FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
14741{
14742 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
14743 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
14744
14745 if (IEM_IS_MODRM_REG_MODE(bRm))
14746 {
14747 /* The new RIP is taken from a register. */
14748 switch (pVCpu->iem.s.enmEffOpSize)
14749 {
14750 case IEMMODE_16BIT:
14751 IEM_MC_BEGIN(0, 1, 0, 0);
14752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14753 IEM_MC_LOCAL(uint16_t, u16Target);
14754 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14755 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
14756 IEM_MC_END();
14757 break;
14758
14759 case IEMMODE_32BIT:
14760 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
14761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14762 IEM_MC_LOCAL(uint32_t, u32Target);
14763 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14764 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
14765 IEM_MC_END();
14766 break;
14767
14768 case IEMMODE_64BIT:
14769 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
14770 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14771 IEM_MC_LOCAL(uint64_t, u64Target);
14772 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14773 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
14774 IEM_MC_END();
14775 break;
14776
14777 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14778 }
14779 }
14780 else
14781 {
14782 /* The new RIP is taken from a memory location. */
14783 switch (pVCpu->iem.s.enmEffOpSize)
14784 {
14785 case IEMMODE_16BIT:
14786 IEM_MC_BEGIN(0, 2, 0, 0);
14787 IEM_MC_LOCAL(uint16_t, u16Target);
14788 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14789 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14791 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14792 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
14793 IEM_MC_END();
14794 break;
14795
14796 case IEMMODE_32BIT:
14797 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
14798 IEM_MC_LOCAL(uint32_t, u32Target);
14799 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14800 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14802 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14803 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
14804 IEM_MC_END();
14805 break;
14806
14807 case IEMMODE_64BIT:
14808 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
14809 IEM_MC_LOCAL(uint64_t, u64Target);
14810 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14811 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14813 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14814 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
14815 IEM_MC_END();
14816 break;
14817
14818 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14819 }
14820 }
14821}
14822
14823
14824/**
14825 * Opcode 0xff /5.
14826 * @param bRm The RM byte.
14827 */
14828FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
14829{
14830 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
14831 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp, 0);
14832}
14833
14834
14835/**
14836 * Opcode 0xff /6.
14837 * @param bRm The RM byte.
14838 */
14839FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
14840{
14841 IEMOP_MNEMONIC(push_Ev, "push Ev");
14842
14843 /* Registers are handled by a common worker. */
14844 if (IEM_IS_MODRM_REG_MODE(bRm))
14845 return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
14846
14847 /* Memory we do here. */
14848 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
14849 switch (pVCpu->iem.s.enmEffOpSize)
14850 {
14851 case IEMMODE_16BIT:
14852 IEM_MC_BEGIN(0, 2, 0, 0);
14853 IEM_MC_LOCAL(uint16_t, u16Src);
14854 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14855 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14857 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14858 IEM_MC_PUSH_U16(u16Src);
14859 IEM_MC_ADVANCE_RIP_AND_FINISH();
14860 IEM_MC_END();
14861 break;
14862
14863 case IEMMODE_32BIT:
14864 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
14865 IEM_MC_LOCAL(uint32_t, u32Src);
14866 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14867 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14869 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14870 IEM_MC_PUSH_U32(u32Src);
14871 IEM_MC_ADVANCE_RIP_AND_FINISH();
14872 IEM_MC_END();
14873 break;
14874
14875 case IEMMODE_64BIT:
14876 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
14877 IEM_MC_LOCAL(uint64_t, u64Src);
14878 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14879 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14881 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14882 IEM_MC_PUSH_U64(u64Src);
14883 IEM_MC_ADVANCE_RIP_AND_FINISH();
14884 IEM_MC_END();
14885 break;
14886
14887 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14888 }
14889}
14890
14891
14892/**
14893 * @opcode 0xff
14894 */
14895FNIEMOP_DEF(iemOp_Grp5)
14896{
14897 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14898 switch (IEM_GET_MODRM_REG_8(bRm))
14899 {
14900 case 0:
14901 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
14902 case 1:
14903 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
14904 case 2:
14905 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
14906 case 3:
14907 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
14908 case 4:
14909 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
14910 case 5:
14911 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
14912 case 6:
14913 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
14914 case 7:
14915 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
14916 IEMOP_RAISE_INVALID_OPCODE_RET();
14917 }
14918 AssertFailedReturn(VERR_IEM_IPE_3);
14919}
14920
14921
14922
/**
 * The one byte opcode dispatch table, indexed by the opcode byte (0x00..0xff).
 * Forward declared as extern at the top of this file.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_in_eAX_DX,        iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
14990
14991
14992/** @} */
14993
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette