VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 102010

Last change on this file since 102010 was 101984, checked in by vboxsync, 16 months ago

VMM/IEM: Added a flush mask for guest register shadows to the IEM_MC_DEFER_TO_CIMPL_X_RET macros to better manage register optimizations when recompiling to native code. bugref:10371

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 530.4 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 101984 2023-11-08 15:56:18Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
/**
 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
 * memory/register as the destination (Eb,Gb encoding), read-write access.
 *
 * Emits the register-destination case and the non-LOCKed memory case, then
 * deliberately stops inside an open 'else {' scope: it MUST be followed by
 * IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED, which
 * supply the LOCK-prefix path and close the two pending braces.
 *
 * @param a_fnNormalU8  Non-locked u8 assembly worker taking
 *                      (pu8Dst, u8Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
            IEM_MC_ARG(uint8_t,    u8Src,   1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
118
/**
 * Body for instructions like TEST &amp; CMP, ++ with a byte memory/registers as
 * operands (Eb,Gb encoding), read-only access to the r/m operand.
 *
 * Same structure as IEMOP_BODY_BINARY_rm_r8_RW, but maps guest memory
 * read-only since the worker only updates EFLAGS.  The register-mode branch
 * is identical to the RW variant (the worker simply doesn't write pu8Dst).
 * Like the RW variant it ends inside an open 'else {' scope and MUST be
 * completed by IEMOP_BODY_BINARY_rm_r8_NO_LOCK or
 * IEMOP_BODY_BINARY_rm_r8_LOCKED.
 *
 * @param a_fnNormalU8  u8 assembly worker taking (pu8Dst, u8Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,  0); \
            IEM_MC_ARG(uint8_t,         u8Src,   1); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
178
/**
 * Tail for IEMOP_BODY_BINARY_rm_r8_RW/_RO when the LOCK prefix is invalid for
 * the instruction: raises \#UD on a LOCKed memory operand and closes the two
 * braces left open by the body macro.
 */
#define IEMOP_BODY_BINARY_rm_r8_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
185
/**
 * Tail for IEMOP_BODY_BINARY_rm_r8_RW when the instruction supports LOCK:
 * emits the LOCK-prefixed memory path using the atomic worker and closes the
 * two braces left open by the body macro.  Relies on 'bRm' declared by the
 * body macro being in scope.
 *
 * @param a_fnLockedU8  Atomic/locked u8 worker taking (pu8Dst, u8Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_rm_r8_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
            IEM_MC_ARG(uint8_t,    u8Src,   1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bMapInfoDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bMapInfoDst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bMapInfoDst); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
208
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination (Gb,Eb encoding).
 *
 * Self-contained (closes all its scopes): with a register destination the
 * LOCK prefix is always invalid, so both branches simply use
 * IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX().
 *
 * @param a_fnNormalU8  u8 assembly worker taking (pu8Dst, u8Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory.  Source is memory, so no need to map/unmap; \
         * a plain fetch of the byte suffices. \
         */ \
        IEM_MC_BEGIN(3, 1, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
257
258
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination (Ev,Gv encoding), read-write access.
 *
 * Switches on the effective operand size for both the register and the
 * non-LOCKed memory cases.  Like the byte variant it deliberately ends
 * inside an open 'else {' scope and MUST be completed by
 * IEMOP_BODY_BINARY_rm_rv_LOCKED (which also closes the braces).
 *
 * @param a_fnNormalU16  Non-locked u16 worker (pu16Dst, u16Src, pEFlags).
 * @param a_fnNormalU32  Non-locked u32 worker (pu32Dst, u32Src, pEFlags).
 * @param a_fnNormalU64  Non-locked u64 worker (pu64Dst, u64Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                /* 32-bit GPR writes clear the high half of the 64-bit register. */ \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t,   u16Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t,   u32Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t,   u64Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/**
 * Tail for IEMOP_BODY_BINARY_rm_rv_RW: emits the LOCK-prefixed memory path
 * using the atomic workers and closes the two braces left open by the body
 * macro.  Relies on 'bRm' declared by the body macro being in scope.
 *
 * Kept as a separate macro to work around a parsing issue in
 * IEMAllInstPython.py.
 *
 * @param a_fnLockedU16  Locked u16 worker (pu16Dst, u16Src, pEFlags).
 * @param a_fnLockedU32  Locked u32 worker (pu32Dst, u32Src, pEFlags).
 * @param a_fnLockedU64  Locked u64 worker (pu64Dst, u64Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t,   u16Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t,   u32Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t,   u64Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
477
/**
 * Body for read-only word/dword/qword instructions like TEST and CMP with
 * memory/register as the destination (Ev,Gv encoding).
 *
 * Unlike the RW/LOCKED pair, this macro is self-contained: since these
 * instructions never write the destination, a LOCK prefix is always invalid
 * and the final 'else' raises \#UD, closing all scopes itself.
 *
 * @param a_fnNormalU16  u16 worker (pu16Dst, u16Src, pEFlags).
 * @param a_fnNormalU32  u32 worker (pu32Dst, u32Src, pEFlags).
 * @param a_fnNormalU64  u64 worker (pu64Dst, u64Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                /* No IEM_MC_CLEAR_HIGH_GREG_U64 here: the destination is not written. */ \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t,         u16Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t,         u32Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t,         u64Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* These instructions never write memory, so LOCK is always invalid. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
627
628
/**
 * Body for instructions like ADD, AND, OR, ++ working on AL with a byte
 * immediate (AL,Ib encoding).
 *
 * Note: deliberately ends with IEM_MC_END() without a semicolon; the invoking
 * statement supplies it.
 *
 * @param a_fnNormalU8  u8 assembly worker taking (pu8Dst, u8Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    \
    IEM_MC_BEGIN(3, 0, 0, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1); \
    IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
    \
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
    \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
648
/**
 * Body for instructions like ADD, AND, OR, ++ working on AX/EAX/RAX with a
 * word/dword immediate (rAX,Iz encoding); the 64-bit form sign-extends a
 * 32-bit immediate.
 *
 * @param a_fnNormalU16      u16 worker (pu16Dst, u16Src, pEFlags).
 * @param a_fnNormalU32      u32 worker (pu32Dst, u32Src, pEFlags).
 * @param a_fnNormalU64      u64 worker (pu64Dst, u64Src, pEFlags).
 * @param a_fModifiesDstReg  Non-zero when the worker writes rAX (e.g. ADD/OR);
 *                           zero for compare-style ops, so the 32-bit case
 *                           skips clearing the high half of RAX.
 */
#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            \
            IEM_MC_BEGIN(3, 0, 0, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        /* NOTE(review): no 'break' after the cases — presumably the MC block \
           exits via a return in IEM_MC_ADVANCE_RIP_AND_FINISH/IEM_MC_END; \
           confirm against IEMMc.h. */ \
        \
        case IEMMODE_32BIT: \
        { \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
            \
            /* 32-bit GPR writes clear bits 63:32, but only when the op writes the register. */ \
            if (a_fModifiesDstReg) \
                IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
715
716
717
718/* Instruction specification format - work in progress: */
719
720/**
721 * @opcode 0x00
722 * @opmnemonic add
723 * @op1 rm:Eb
724 * @op2 reg:Gb
725 * @opmaps one
726 * @openc ModR/M
727 * @opflmodify cf,pf,af,zf,sf,of
728 * @ophints harmless ignores_op_sizes
729 * @opstats add_Eb_Gb
730 * @opgroup og_gen_arith_bin
731 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
732 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
733 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
734 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
735 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* The RW body handles register & non-locked memory forms and leaves an
       open scope that the LOCKED body completes with the LOCKed memory form. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_add_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_add_u8_locked);
}
742
743
744/**
745 * @opcode 0x01
746 * @opgroup og_gen_arith_bin
747 * @opflmodify cf,pf,af,zf,sf,of
748 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
749 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
750 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
751 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
752 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* RW body + LOCKED tail: the pair forms one statement; keep them adjacent. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
759
760
761/**
762 * @opcode 0x02
763 * @opgroup og_gen_arith_bin
764 * @opflmodify cf,pf,af,zf,sf,of
765 * @opcopytests iemOp_add_Eb_Gb
766 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register destination, so no LOCK variant is needed. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
}
772
773
774/**
775 * @opcode 0x03
776 * @opgroup og_gen_arith_bin
777 * @opflmodify cf,pf,af,zf,sf,of
778 * @opcopytests iemOp_add_Ev_Gv
779 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* NOTE(review): IEMOP_BODY_BINARY_rv_rm is defined elsewhere in this file;
       trailing args appear to be modifies-dst / extra flag — confirm at its definition. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1, 0);
}
785
786
787/**
788 * @opcode 0x04
789 * @opgroup og_gen_arith_bin
790 * @opflmodify cf,pf,af,zf,sf,of
791 * @opcopytests iemOp_add_Eb_Gb
792 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* The body macro omits its final ';', supplied here. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
}
798
799
800/**
801 * @opcode 0x05
802 * @opgroup og_gen_arith_bin
803 * @opflmodify cf,pf,af,zf,sf,of
804 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
805 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
806 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
807 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
808 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* Last arg = 1: ADD writes rAX, so the 32-bit case clears RAX[63:32]. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
814
815
816/**
817 * @opcode 0x06
818 * @opgroup og_stack_sreg
819 */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();   /* PUSH ES is invalid in 64-bit mode. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
826
827
828/**
829 * @opcode 0x07
830 * @opgroup og_stack_sreg
831 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();   /* POP ES is invalid in 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to a C implementation.  The second argument is the guest
       register shadow flush mask for the native recompiler: rSP plus the ES
       selector/base/limit are dirtied by the pop (see r101984). */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                  RT_BIT_64(kIemNativeGstReg_GprFirst      + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst   + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst  + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES),
                                iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
844
845
846/**
847 * @opcode 0x08
848 * @opgroup og_gen_arith_bin
849 * @opflmodify cf,pf,af,zf,sf,of
850 * @opflundef af
851 * @opflclear of,cf
852 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
853 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
854 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
855 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
856 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is undefined after OR. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_or_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_or_u8_locked);
}
864
865
/**
867 * @opcode 0x09
868 * @opgroup og_gen_arith_bin
869 * @opflmodify cf,pf,af,zf,sf,of
870 * @opflundef af
871 * @opflclear of,cf
872 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
873 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
874 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
875 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
876 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
877 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
878 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is undefined after OR. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
886
887
888/**
889 * @opcode 0x0a
890 * @opgroup og_gen_arith_bin
891 * @opflmodify cf,pf,af,zf,sf,of
892 * @opflundef af
893 * @opflclear of,cf
894 * @opcopytests iemOp_or_Eb_Gb
895 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is undefined after OR. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
}
902
903
904/**
905 * @opcode 0x0b
906 * @opgroup og_gen_arith_bin
907 * @opflmodify cf,pf,af,zf,sf,of
908 * @opflundef af
909 * @opflclear of,cf
910 * @opcopytests iemOp_or_Ev_Gv
911 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is undefined after OR. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1, 0);
}
918
919
920/**
921 * @opcode 0x0c
922 * @opgroup og_gen_arith_bin
923 * @opflmodify cf,pf,af,zf,sf,of
924 * @opflundef af
925 * @opflclear of,cf
926 * @opcopytests iemOp_or_Eb_Gb
927 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is undefined after OR. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
}
934
935
936/**
937 * @opcode 0x0d
938 * @opgroup og_gen_arith_bin
939 * @opflmodify cf,pf,af,zf,sf,of
940 * @opflundef af
941 * @opflclear of,cf
942 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
943 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
944 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
945 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
946 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
947 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
948 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
949 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX, immz - worker picked by effective operand size; trailing 1 is a
       body-macro flag (cmp passes 0). */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
956
957
958/**
959 * @opcode 0x0e
960 * @opgroup og_stack_sreg
961 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS - invalid in 64-bit mode; defers to the common segment-register push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL(iemOpCommonPushSReg, X86_SREG_CS);
}
968
969
970/**
971 * @opcode 0x0f
972 * @opmnemonic EscTwo0f
973 * @openc two0f
974 * @opdisenum OP_2B_ESC
975 * @ophints harmless
976 * @opgroup og_escapes
977 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        /* Dispatch into the two-byte map; 4 entries per opcode, idxPrefix selects the column. */
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_CS),
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
1015
1016/**
1017 * @opcode 0x10
1018 * @opgroup og_gen_arith_bin
1019 * @opfltest cf
1020 * @opflmodify cf,pf,af,zf,sf,of
1021 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1022 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1023 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1024 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1025 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1026 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC r/m8, r8 - memory/register destination; LOCK handled by the _LOCKED body. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW( iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_adc_u8_locked);
}
1033
1034
1035/**
1036 * @opcode 0x11
1037 * @opgroup og_gen_arith_bin
1038 * @opfltest cf
1039 * @opflmodify cf,pf,af,zf,sf,of
1040 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1041 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1042 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1043 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1044 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1045 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC r/m, reg - worker picked by effective operand size; LOCK handled by the _LOCKED body. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1052
1053
1054/**
1055 * @opcode 0x12
1056 * @opgroup og_gen_arith_bin
1057 * @opfltest cf
1058 * @opflmodify cf,pf,af,zf,sf,of
1059 * @opcopytests iemOp_adc_Eb_Gb
1060 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC r8, r/m8 - register destination, hence no LOCK hint. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
1066
1067
1068/**
1069 * @opcode 0x13
1070 * @opgroup og_gen_arith_bin
1071 * @opfltest cf
1072 * @opflmodify cf,pf,af,zf,sf,of
1073 * @opcopytests iemOp_adc_Ev_Gv
1074 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC reg, r/m - register destination; trailing (1, 0) are body-macro flags (cmp passes (0, 0)). */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1, 0);
}
1080
1081
1082/**
1083 * @opcode 0x14
1084 * @opgroup og_gen_arith_bin
1085 * @opfltest cf
1086 * @opflmodify cf,pf,af,zf,sf,of
1087 * @opcopytests iemOp_adc_Eb_Gb
1088 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL, imm8 - fixed AL destination; 8-bit form ignores operand-size prefixes. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
1094
1095
1096/**
1097 * @opcode 0x15
1098 * @opgroup og_gen_arith_bin
1099 * @opfltest cf
1100 * @opflmodify cf,pf,af,zf,sf,of
1101 * @opcopytests iemOp_adc_Ev_Gv
1102 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX, immz - worker picked by effective operand size. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1108
1109
1110/**
1111 * @opcode 0x16
1112 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS - invalid in 64-bit mode; defers to the common segment-register push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1119
1120
/**
 * @opcode 0x17
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS - also inhibits interrupts/traps for the following instruction. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    /* Flush mask: SP plus all shadowed SS state (selector, base, limit) for the recompiler. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_SS),
                                iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1139
1140
1141/**
1142 * @opcode 0x18
1143 * @opgroup og_gen_arith_bin
1144 * @opfltest cf
1145 * @opflmodify cf,pf,af,zf,sf,of
1146 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB r/m8, r8 - memory/register destination; LOCK handled by the _LOCKED body. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW( iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sbb_u8_locked);
}
1153
1154
1155/**
1156 * @opcode 0x19
1157 * @opgroup og_gen_arith_bin
1158 * @opfltest cf
1159 * @opflmodify cf,pf,af,zf,sf,of
1160 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB r/m, reg - worker picked by effective operand size; LOCK handled by the _LOCKED body. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1167
1168
1169/**
1170 * @opcode 0x1a
1171 * @opgroup og_gen_arith_bin
1172 * @opfltest cf
1173 * @opflmodify cf,pf,af,zf,sf,of
1174 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB r8, r/m8 - register destination, hence no LOCK hint. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
1180
1181
1182/**
1183 * @opcode 0x1b
1184 * @opgroup og_gen_arith_bin
1185 * @opfltest cf
1186 * @opflmodify cf,pf,af,zf,sf,of
1187 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB reg, r/m - register destination; trailing (1, 0) are body-macro flags (cmp passes (0, 0)). */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1, 0);
}
1193
1194
1195/**
1196 * @opcode 0x1c
1197 * @opgroup og_gen_arith_bin
1198 * @opfltest cf
1199 * @opflmodify cf,pf,af,zf,sf,of
1200 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL, imm8 - fixed AL destination; 8-bit form ignores operand-size prefixes. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
1206
1207
1208/**
1209 * @opcode 0x1d
1210 * @opgroup og_gen_arith_bin
1211 * @opfltest cf
1212 * @opflmodify cf,pf,af,zf,sf,of
1213 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX, immz - worker picked by effective operand size. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1219
1220
1221/**
1222 * @opcode 0x1e
1223 * @opgroup og_stack_sreg
1224 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS - invalid in 64-bit mode; defers to the common segment-register push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1231
1232
1233/**
1234 * @opcode 0x1f
1235 * @opgroup og_stack_sreg
1236 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    /* Flush mask: SP plus all shadowed DS state (selector, base, limit) for the recompiler. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS),
                                iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1249
1250
1251/**
1252 * @opcode 0x20
1253 * @opgroup og_gen_arith_bin
1254 * @opflmodify cf,pf,af,zf,sf,of
1255 * @opflundef af
1256 * @opflclear of,cf
1257 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND r/m8, r8 - memory/register destination; LOCK handled by the _LOCKED body. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW( iemAImpl_and_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_and_u8_locked);
}
1265
1266
1267/**
1268 * @opcode 0x21
1269 * @opgroup og_gen_arith_bin
1270 * @opflmodify cf,pf,af,zf,sf,of
1271 * @opflundef af
1272 * @opflclear of,cf
1273 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND r/m, reg - worker picked by effective operand size; LOCK handled by the _LOCKED body. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1281
1282
1283/**
1284 * @opcode 0x22
1285 * @opgroup og_gen_arith_bin
1286 * @opflmodify cf,pf,af,zf,sf,of
1287 * @opflundef af
1288 * @opflclear of,cf
1289 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND r8, r/m8 - register destination, hence no LOCK hint. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1296
1297
1298/**
1299 * @opcode 0x23
1300 * @opgroup og_gen_arith_bin
1301 * @opflmodify cf,pf,af,zf,sf,of
1302 * @opflundef af
1303 * @opflclear of,cf
1304 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND reg, r/m - register destination; trailing (1, 0) are body-macro flags (cmp passes (0, 0)). */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1, 0);
}
1311
1312
1313/**
1314 * @opcode 0x24
1315 * @opgroup og_gen_arith_bin
1316 * @opflmodify cf,pf,af,zf,sf,of
1317 * @opflundef af
1318 * @opflclear of,cf
1319 */
1320FNIEMOP_DEF(iemOp_and_Al_Ib)
1321{
1322 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1323 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1324 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1325}
1326
1327
1328/**
1329 * @opcode 0x25
1330 * @opgroup og_gen_arith_bin
1331 * @opflmodify cf,pf,af,zf,sf,of
1332 * @opflundef af
1333 * @opflclear of,cf
1334 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX, immz - worker picked by effective operand size. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1341
1342
1343/**
1344 * @opcode 0x26
1345 * @opmnemonic SEG
1346 * @op1 ES
1347 * @opgroup og_prefix
1348 * @openc prefix
1349 * @opdisenum OP_SEG
1350 * @ophints harmless
1351 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* Segment-override prefix: record ES as effective segment, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg    = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1361
1362
1363/**
1364 * @opcode 0x27
1365 * @opfltest af,cf
1366 * @opflmodify cf,pf,af,zf,sf,of
1367 * @opflundef of
1368 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA - decimal adjust AL after addition; invalid in 64-bit mode; flushes rAX shadow. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_daa);
}
1377
1378
1379/**
1380 * @opcode 0x28
1381 * @opgroup og_gen_arith_bin
1382 * @opflmodify cf,pf,af,zf,sf,of
1383 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB r/m8, r8 - memory/register destination; LOCK handled by the _LOCKED body. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW( iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sub_u8_locked);
}
1390
1391
1392/**
1393 * @opcode 0x29
1394 * @opgroup og_gen_arith_bin
1395 * @opflmodify cf,pf,af,zf,sf,of
1396 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB r/m, reg - worker picked by effective operand size; LOCK handled by the _LOCKED body. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1403
1404
1405/**
1406 * @opcode 0x2a
1407 * @opgroup og_gen_arith_bin
1408 * @opflmodify cf,pf,af,zf,sf,of
1409 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB r8, r/m8 - register destination, hence no LOCK hint. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1415
1416
1417/**
1418 * @opcode 0x2b
1419 * @opgroup og_gen_arith_bin
1420 * @opflmodify cf,pf,af,zf,sf,of
1421 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB reg, r/m - register destination; trailing (1, 0) are body-macro flags (cmp passes (0, 0)). */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1, 0);
}
1427
1428
1429/**
1430 * @opcode 0x2c
1431 * @opgroup og_gen_arith_bin
1432 * @opflmodify cf,pf,af,zf,sf,of
1433 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL, imm8 - fixed AL destination; 8-bit form ignores operand-size prefixes. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1439
1440
1441/**
1442 * @opcode 0x2d
1443 * @opgroup og_gen_arith_bin
1444 * @opflmodify cf,pf,af,zf,sf,of
1445 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX, immz - worker picked by effective operand size. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1451
1452
1453/**
1454 * @opcode 0x2e
1455 * @opmnemonic SEG
1456 * @op1 CS
1457 * @opgroup og_prefix
1458 * @openc prefix
1459 * @opdisenum OP_SEG
1460 * @ophints harmless
1461 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* Segment-override prefix: record CS as effective segment, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1471
1472
1473/**
1474 * @opcode 0x2f
1475 * @opfltest af,cf
1476 * @opflmodify cf,pf,af,zf,sf,of
1477 * @opflundef of
1478 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS - decimal adjust AL after subtraction; invalid in 64-bit mode; flushes rAX shadow. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_das);
}
1487
1488
1489/**
1490 * @opcode 0x30
1491 * @opgroup og_gen_arith_bin
1492 * @opflmodify cf,pf,af,zf,sf,of
1493 * @opflundef af
1494 * @opflclear of,cf
1495 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR r/m8, r8 - memory/register destination; LOCK handled by the _LOCKED body. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW( iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_xor_u8_locked);
}
1503
1504
1505/**
1506 * @opcode 0x31
1507 * @opgroup og_gen_arith_bin
1508 * @opflmodify cf,pf,af,zf,sf,of
1509 * @opflundef af
1510 * @opflclear of,cf
1511 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR r/m, reg - worker picked by effective operand size; LOCK handled by the _LOCKED body. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1519
1520
1521/**
1522 * @opcode 0x32
1523 * @opgroup og_gen_arith_bin
1524 * @opflmodify cf,pf,af,zf,sf,of
1525 * @opflundef af
1526 * @opflclear of,cf
1527 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR r8, r/m8 - register destination, hence no LOCK hint. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1534
1535
1536/**
1537 * @opcode 0x33
1538 * @opgroup og_gen_arith_bin
1539 * @opflmodify cf,pf,af,zf,sf,of
1540 * @opflundef af
1541 * @opflclear of,cf
1542 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR reg, r/m - register destination; trailing (1, 0) are body-macro flags (cmp passes (0, 0)). */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1, 0);
}
1549
1550
1551/**
1552 * @opcode 0x34
1553 * @opgroup og_gen_arith_bin
1554 * @opflmodify cf,pf,af,zf,sf,of
1555 * @opflundef af
1556 * @opflclear of,cf
1557 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL, imm8 - fixed AL destination; 8-bit form ignores operand-size prefixes. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1564
1565
1566/**
1567 * @opcode 0x35
1568 * @opgroup og_gen_arith_bin
1569 * @opflmodify cf,pf,af,zf,sf,of
1570 * @opflundef af
1571 * @opflclear of,cf
1572 */
1573FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1574{
1575 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1576 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1577 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1578}
1579
1580
1581/**
1582 * @opcode 0x36
1583 * @opmnemonic SEG
1584 * @op1 SS
1585 * @opgroup og_prefix
1586 * @openc prefix
1587 * @opdisenum OP_SEG
1588 * @ophints harmless
1589 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* Segment-override prefix: record SS as effective segment, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1599
1600
1601/**
1602 * @opcode 0x37
1603 * @opfltest af,cf
1604 * @opflmodify cf,pf,af,zf,sf,of
1605 * @opflundef pf,zf,sf,of
1606 * @opgroup og_gen_arith_dec
1607 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1608 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1609 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1610 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1611 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1612 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1613 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1614 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1615 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1616 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1617 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1618 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1619 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1620 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1621 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1622 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1623 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1624 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1625 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1626 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1627 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1628 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1629 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1630 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1631 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1632 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1633 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1634 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1635 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1636 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1637 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1638 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA - ASCII adjust AL after addition; invalid in 64-bit mode; flushes rAX shadow. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aaa);
}
1648
1649
1650/**
1651 * @opcode 0x38
1652 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP r/m8, r8 - read-only destination, so the LOCK prefix is rejected. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
1659
1660
1661/**
1662 * @opcode 0x39
1663 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP r/m, reg - read-only body; worker picked by effective operand size. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
1669
1670
1671/**
1672 * @opcode 0x3a
1673 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP r8, r/m8 - only EFLAGS are updated. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1679
1680
1681/**
1682 * @opcode 0x3b
1683 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP reg, r/m - first trailing 0 marks the destination as not written (or/adc/etc. pass 1). */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0, 0);
}
1689
1690
1691/**
1692 * @opcode 0x3c
1693 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL, imm8 - only EFLAGS are updated. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1699
1700
1701/**
1702 * @opcode 0x3d
1703 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX, immz - trailing 0 marks the destination as not written (or/adc/etc. pass 1). */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1709
1710
1711/**
1712 * @opcode 0x3e
1713 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* Segment-override prefix: record DS as effective segment, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1723
1724
1725/**
1726 * @opcode 0x3f
1727 * @opfltest af,cf
1728 * @opflmodify cf,pf,af,zf,sf,of
1729 * @opflundef pf,zf,sf,of
1730 * @opgroup og_gen_arith_dec
1731 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1732 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1733 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1734 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1735 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1736 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1737 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1738 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1739 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1740 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1741 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1742 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1743 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1744 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1745 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1746 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1747 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1748 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1749 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1750 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1751 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1752 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1753 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1754 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1755 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1756 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1757 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1758 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1759 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1760 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1761 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1762 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1763 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1764 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1765 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1766 */
1767FNIEMOP_DEF(iemOp_aas)
1768{
1769 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1770 IEMOP_HLP_NO_64BIT();
1771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1772 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1773
1774 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aas);
1775}
1776
1777
/**
 * Common 'inc/dec register' helper.
 *
 * Not for 64-bit code, only for what became the rex prefixes.
 *
 * Expands to a 16-bit and a 32-bit IEM_MC block that applies the given
 * unary EFLAGS-updating worker to general register a_iReg; the 32-bit case
 * also clears bits 63:32 of the register (IEM_MC_CLEAR_HIGH_GREG_U64).
 * The default switch case is unreachable (IEM_NOT_REACHED_DEFAULT_CASE_RET).
 */
#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *,  pu16Dst, 0); \
            IEM_MC_ARG(uint32_t *,  pEFlags, 1); \
            IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0); \
            IEM_MC_ARG(uint32_t *,  pEFlags, 1); \
            IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
            IEM_MC_CLEAR_HIGH_GREG_U64(a_iReg); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
1813
1814/**
1815 * @opcode 0x40
1816 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: plain INC eAX via the common unary helper. */
    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1834
1835
1836/**
1837 * @opcode 0x41
1838 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.B, opcode 0x41).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: plain INC eCX via the common unary helper. */
    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1857
1858
1859/**
1860 * @opcode 0x42
1861 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.X, opcode 0x42).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: plain INC eDX via the common unary helper. */
    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1880
1881
1882
1883/**
1884 * @opcode 0x43
1885 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.XB, opcode 0x43).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: plain INC eBX via the common unary helper. */
    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1905
1906
1907/**
1908 * @opcode 0x44
1909 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.R, opcode 0x44).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: plain INC eSP via the common unary helper. */
    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1928
1929
1930/**
1931 * @opcode 0x45
1932 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.RB, opcode 0x45).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: plain INC eBP via the common unary helper. */
    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1952
1953
1954/**
1955 * @opcode 0x46
1956 */
1957FNIEMOP_DEF(iemOp_inc_eSI)
1958{
1959 /*
1960 * This is a REX prefix in 64-bit mode.
1961 */
1962 if (IEM_IS_64BIT_CODE(pVCpu))
1963 {
1964 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
1965 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
1966 pVCpu->iem.s.uRexReg = 1 << 3;
1967 pVCpu->iem.s.uRexIndex = 1 << 3;
1968
1969 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1970 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1971 }
1972
1973 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
1974 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
1975}
1976
1977
1978/**
1979 * @opcode 0x47
1980 */
1981FNIEMOP_DEF(iemOp_inc_eDI)
1982{
1983 /*
1984 * This is a REX prefix in 64-bit mode.
1985 */
1986 if (IEM_IS_64BIT_CODE(pVCpu))
1987 {
1988 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
1989 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
1990 pVCpu->iem.s.uRexReg = 1 << 3;
1991 pVCpu->iem.s.uRexB = 1 << 3;
1992 pVCpu->iem.s.uRexIndex = 1 << 3;
1993
1994 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1995 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1996 }
1997
1998 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
1999 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
2000}
2001
2002
2003/**
2004 * @opcode 0x48
2005 */
2006FNIEMOP_DEF(iemOp_dec_eAX)
2007{
2008 /*
2009 * This is a REX prefix in 64-bit mode.
2010 */
2011 if (IEM_IS_64BIT_CODE(pVCpu))
2012 {
2013 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
2014 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
2015 iemRecalEffOpSize(pVCpu);
2016
2017 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2018 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2019 }
2020
2021 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
2022 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
2023}
2024
2025
2026/**
2027 * @opcode 0x49
2028 */
2029FNIEMOP_DEF(iemOp_dec_eCX)
2030{
2031 /*
2032 * This is a REX prefix in 64-bit mode.
2033 */
2034 if (IEM_IS_64BIT_CODE(pVCpu))
2035 {
2036 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
2037 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2038 pVCpu->iem.s.uRexB = 1 << 3;
2039 iemRecalEffOpSize(pVCpu);
2040
2041 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2042 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2043 }
2044
2045 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
2046 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
2047}
2048
2049
2050/**
2051 * @opcode 0x4a
2052 */
2053FNIEMOP_DEF(iemOp_dec_eDX)
2054{
2055 /*
2056 * This is a REX prefix in 64-bit mode.
2057 */
2058 if (IEM_IS_64BIT_CODE(pVCpu))
2059 {
2060 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
2061 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2062 pVCpu->iem.s.uRexIndex = 1 << 3;
2063 iemRecalEffOpSize(pVCpu);
2064
2065 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2066 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2067 }
2068
2069 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
2070 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
2071}
2072
2073
2074/**
2075 * @opcode 0x4b
2076 */
2077FNIEMOP_DEF(iemOp_dec_eBX)
2078{
2079 /*
2080 * This is a REX prefix in 64-bit mode.
2081 */
2082 if (IEM_IS_64BIT_CODE(pVCpu))
2083 {
2084 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
2085 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2086 pVCpu->iem.s.uRexB = 1 << 3;
2087 pVCpu->iem.s.uRexIndex = 1 << 3;
2088 iemRecalEffOpSize(pVCpu);
2089
2090 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2091 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2092 }
2093
2094 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
2095 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
2096}
2097
2098
2099/**
2100 * @opcode 0x4c
2101 */
2102FNIEMOP_DEF(iemOp_dec_eSP)
2103{
2104 /*
2105 * This is a REX prefix in 64-bit mode.
2106 */
2107 if (IEM_IS_64BIT_CODE(pVCpu))
2108 {
2109 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
2110 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
2111 pVCpu->iem.s.uRexReg = 1 << 3;
2112 iemRecalEffOpSize(pVCpu);
2113
2114 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2115 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2116 }
2117
2118 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
2119 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
2120}
2121
2122
2123/**
2124 * @opcode 0x4d
2125 */
2126FNIEMOP_DEF(iemOp_dec_eBP)
2127{
2128 /*
2129 * This is a REX prefix in 64-bit mode.
2130 */
2131 if (IEM_IS_64BIT_CODE(pVCpu))
2132 {
2133 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
2134 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2135 pVCpu->iem.s.uRexReg = 1 << 3;
2136 pVCpu->iem.s.uRexB = 1 << 3;
2137 iemRecalEffOpSize(pVCpu);
2138
2139 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2140 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2141 }
2142
2143 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
2144 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
2145}
2146
2147
2148/**
2149 * @opcode 0x4e
2150 */
2151FNIEMOP_DEF(iemOp_dec_eSI)
2152{
2153 /*
2154 * This is a REX prefix in 64-bit mode.
2155 */
2156 if (IEM_IS_64BIT_CODE(pVCpu))
2157 {
2158 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
2159 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2160 pVCpu->iem.s.uRexReg = 1 << 3;
2161 pVCpu->iem.s.uRexIndex = 1 << 3;
2162 iemRecalEffOpSize(pVCpu);
2163
2164 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2165 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2166 }
2167
2168 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
2169 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
2170}
2171
2172
2173/**
2174 * @opcode 0x4f
2175 */
2176FNIEMOP_DEF(iemOp_dec_eDI)
2177{
2178 /*
2179 * This is a REX prefix in 64-bit mode.
2180 */
2181 if (IEM_IS_64BIT_CODE(pVCpu))
2182 {
2183 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
2184 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2185 pVCpu->iem.s.uRexReg = 1 << 3;
2186 pVCpu->iem.s.uRexB = 1 << 3;
2187 pVCpu->iem.s.uRexIndex = 1 << 3;
2188 iemRecalEffOpSize(pVCpu);
2189
2190 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2191 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2192 }
2193
2194 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
2195 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
2196}
2197
2198
2199/**
2200 * Common 'push register' helper.
2201 */
2202FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
2203{
2204 if (IEM_IS_64BIT_CODE(pVCpu))
2205 {
2206 iReg |= pVCpu->iem.s.uRexB;
2207 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2208 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2209 }
2210
2211 switch (pVCpu->iem.s.enmEffOpSize)
2212 {
2213 case IEMMODE_16BIT:
2214 IEM_MC_BEGIN(0, 1, 0, 0);
2215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2216 IEM_MC_LOCAL(uint16_t, u16Value);
2217 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
2218 IEM_MC_PUSH_U16(u16Value);
2219 IEM_MC_ADVANCE_RIP_AND_FINISH();
2220 IEM_MC_END();
2221 break;
2222
2223 case IEMMODE_32BIT:
2224 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2226 IEM_MC_LOCAL(uint32_t, u32Value);
2227 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
2228 IEM_MC_PUSH_U32(u32Value);
2229 IEM_MC_ADVANCE_RIP_AND_FINISH();
2230 IEM_MC_END();
2231 break;
2232
2233 case IEMMODE_64BIT:
2234 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2236 IEM_MC_LOCAL(uint64_t, u64Value);
2237 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
2238 IEM_MC_PUSH_U64(u64Value);
2239 IEM_MC_ADVANCE_RIP_AND_FINISH();
2240 IEM_MC_END();
2241 break;
2242
2243 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2244 }
2245}
2246
2247
2248/**
2249 * @opcode 0x50
2250 */
2251FNIEMOP_DEF(iemOp_push_eAX)
2252{
2253 IEMOP_MNEMONIC(push_rAX, "push rAX");
2254 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
2255}
2256
2257
2258/**
2259 * @opcode 0x51
2260 */
2261FNIEMOP_DEF(iemOp_push_eCX)
2262{
2263 IEMOP_MNEMONIC(push_rCX, "push rCX");
2264 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
2265}
2266
2267
2268/**
2269 * @opcode 0x52
2270 */
2271FNIEMOP_DEF(iemOp_push_eDX)
2272{
2273 IEMOP_MNEMONIC(push_rDX, "push rDX");
2274 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
2275}
2276
2277
2278/**
2279 * @opcode 0x53
2280 */
2281FNIEMOP_DEF(iemOp_push_eBX)
2282{
2283 IEMOP_MNEMONIC(push_rBX, "push rBX");
2284 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
2285}
2286
2287
2288/**
2289 * @opcode 0x54
2290 */
2291FNIEMOP_DEF(iemOp_push_eSP)
2292{
2293 IEMOP_MNEMONIC(push_rSP, "push rSP");
2294 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
2295 {
2296 IEM_MC_BEGIN(0, 1, IEM_MC_F_ONLY_8086, 0);
2297 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2298 IEM_MC_LOCAL(uint16_t, u16Value);
2299 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
2300 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
2301 IEM_MC_PUSH_U16(u16Value);
2302 IEM_MC_ADVANCE_RIP_AND_FINISH();
2303 IEM_MC_END();
2304 }
2305 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
2306}
2307
2308
2309/**
2310 * @opcode 0x55
2311 */
2312FNIEMOP_DEF(iemOp_push_eBP)
2313{
2314 IEMOP_MNEMONIC(push_rBP, "push rBP");
2315 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
2316}
2317
2318
2319/**
2320 * @opcode 0x56
2321 */
2322FNIEMOP_DEF(iemOp_push_eSI)
2323{
2324 IEMOP_MNEMONIC(push_rSI, "push rSI");
2325 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
2326}
2327
2328
2329/**
2330 * @opcode 0x57
2331 */
2332FNIEMOP_DEF(iemOp_push_eDI)
2333{
2334 IEMOP_MNEMONIC(push_rDI, "push rDI");
2335 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
2336}
2337
2338
2339/**
2340 * Common 'pop register' helper.
2341 */
2342FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
2343{
2344 if (IEM_IS_64BIT_CODE(pVCpu))
2345 {
2346 iReg |= pVCpu->iem.s.uRexB;
2347 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2348 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2349 }
2350
2351 switch (pVCpu->iem.s.enmEffOpSize)
2352 {
2353 case IEMMODE_16BIT:
2354 IEM_MC_BEGIN(0, 1, 0, 0);
2355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2356 IEM_MC_LOCAL(uint16_t *, pu16Dst);
2357 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
2358 IEM_MC_POP_U16(pu16Dst);
2359 IEM_MC_ADVANCE_RIP_AND_FINISH();
2360 IEM_MC_END();
2361 break;
2362
2363 case IEMMODE_32BIT:
2364 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2366 IEM_MC_LOCAL(uint32_t *, pu32Dst);
2367 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
2368 IEM_MC_POP_U32(pu32Dst);
2369 IEM_MC_CLEAR_HIGH_GREG_U64(iReg); /** @todo testcase*/
2370 IEM_MC_ADVANCE_RIP_AND_FINISH();
2371 IEM_MC_END();
2372 break;
2373
2374 case IEMMODE_64BIT:
2375 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2377 IEM_MC_LOCAL(uint64_t *, pu64Dst);
2378 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
2379 IEM_MC_POP_U64(pu64Dst);
2380 IEM_MC_ADVANCE_RIP_AND_FINISH();
2381 IEM_MC_END();
2382 break;
2383
2384 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2385 }
2386}
2387
2388
2389/**
2390 * @opcode 0x58
2391 */
2392FNIEMOP_DEF(iemOp_pop_eAX)
2393{
2394 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
2395 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
2396}
2397
2398
2399/**
2400 * @opcode 0x59
2401 */
2402FNIEMOP_DEF(iemOp_pop_eCX)
2403{
2404 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
2405 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
2406}
2407
2408
2409/**
2410 * @opcode 0x5a
2411 */
2412FNIEMOP_DEF(iemOp_pop_eDX)
2413{
2414 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
2415 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
2416}
2417
2418
2419/**
2420 * @opcode 0x5b
2421 */
2422FNIEMOP_DEF(iemOp_pop_eBX)
2423{
2424 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
2425 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
2426}
2427
2428
2429/**
2430 * @opcode 0x5c
2431 */
2432FNIEMOP_DEF(iemOp_pop_eSP)
2433{
2434 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
2435 if (IEM_IS_64BIT_CODE(pVCpu))
2436 {
2437 if (pVCpu->iem.s.uRexB)
2438 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
2439 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2440 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2441 }
2442
2443 /** @todo add testcase for this instruction. */
2444 switch (pVCpu->iem.s.enmEffOpSize)
2445 {
2446 case IEMMODE_16BIT:
2447 IEM_MC_BEGIN(0, 1, 0, 0);
2448 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2449 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2450 IEM_MC_LOCAL(uint16_t, u16Dst);
2451 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
2452 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
2453 IEM_MC_ADVANCE_RIP_AND_FINISH();
2454 IEM_MC_END();
2455 break;
2456
2457 case IEMMODE_32BIT:
2458 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
2459 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2460 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2461 IEM_MC_LOCAL(uint32_t, u32Dst);
2462 IEM_MC_POP_U32(&u32Dst);
2463 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
2464 IEM_MC_ADVANCE_RIP_AND_FINISH();
2465 IEM_MC_END();
2466 break;
2467
2468 case IEMMODE_64BIT:
2469 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2470 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2471 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2472 IEM_MC_LOCAL(uint64_t, u64Dst);
2473 IEM_MC_POP_U64(&u64Dst);
2474 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
2475 IEM_MC_ADVANCE_RIP_AND_FINISH();
2476 IEM_MC_END();
2477 break;
2478
2479 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2480 }
2481}
2482
2483
2484/**
2485 * @opcode 0x5d
2486 */
2487FNIEMOP_DEF(iemOp_pop_eBP)
2488{
2489 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
2490 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
2491}
2492
2493
2494/**
2495 * @opcode 0x5e
2496 */
2497FNIEMOP_DEF(iemOp_pop_eSI)
2498{
2499 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
2500 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
2501}
2502
2503
2504/**
2505 * @opcode 0x5f
2506 */
2507FNIEMOP_DEF(iemOp_pop_eDI)
2508{
2509 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
2510 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
2511}
2512
2513
2514/**
2515 * @opcode 0x60
2516 */
2517FNIEMOP_DEF(iemOp_pusha)
2518{
2519 IEMOP_MNEMONIC(pusha, "pusha");
2520 IEMOP_HLP_MIN_186();
2521 IEMOP_HLP_NO_64BIT();
2522 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2523 IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_16);
2524 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2525 IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_32);
2526}
2527
2528
2529/**
2530 * @opcode 0x61
2531 */
2532FNIEMOP_DEF(iemOp_popa__mvex)
2533{
2534 if (!IEM_IS_64BIT_CODE(pVCpu))
2535 {
2536 IEMOP_MNEMONIC(popa, "popa");
2537 IEMOP_HLP_MIN_186();
2538 IEMOP_HLP_NO_64BIT();
2539 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2540 IEM_MC_DEFER_TO_CIMPL_0_RET(0,
2541 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
2542 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
2543 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
2544 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
2545 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
2546 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
2547 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
2548 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
2549 iemCImpl_popa_16);
2550 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2551 IEM_MC_DEFER_TO_CIMPL_0_RET(0,
2552 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
2553 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
2554 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
2555 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
2556 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
2557 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
2558 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
2559 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
2560 iemCImpl_popa_32);
2561 }
2562 IEMOP_MNEMONIC(mvex, "mvex");
2563 Log(("mvex prefix is not supported!\n"));
2564 IEMOP_RAISE_INVALID_OPCODE_RET();
2565}
2566
2567
2568/**
2569 * @opcode 0x62
2570 * @opmnemonic bound
2571 * @op1 Gv_RO
2572 * @op2 Ma
2573 * @opmincpu 80186
2574 * @ophints harmless x86_invalid_64
2575 * @optest op1=0 op2=0 ->
2576 * @optest op1=1 op2=0 -> value.xcpt=5
2577 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2578 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2579 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2580 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2581 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2582 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2583 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2584 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2585 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2586 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2587 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2588 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2589 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2590 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2591 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2592 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2593 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2594 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2595 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2596 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2597 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2598 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2599 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2600 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2601 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2602 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2603 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2604 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2605 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2606 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2607 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2608 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2609 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2610 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2611 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2612 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2613 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2614 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2615 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2616 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2617 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2618 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2619 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set.  In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X.  Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /* MOD != 3: this really is BOUND.  The bounds pair is read from
               memory and the index register checked against it in C code. */
            /** @todo testcase: check that there are two memory accesses involved.  Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        /* MOD == 3: EVEX prefix territory (see leading comment). */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    IEMOP_MNEMONIC(evex, "evex");
    /* Consume the remaining two EVEX payload bytes before giving up. */
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2707
2708
/** Opcode 0x63 - non-64-bit modes.
 *
 * ARPL Ew,Gw - adjusts the RPL of the destination selector up to that of the
 * source and sets ZF accordingly (done in iemAImpl_arpl).  Requires protected
 * mode; real and V86 mode raise \#UD via IEMOP_HLP_NO_REAL_OR_V86_MODE.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination. */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *,      pu16Dst, 0);
        IEM_MC_ARG(uint16_t,        u16Src,  1);
        IEM_MC_ARG(uint32_t *,      pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination - mapped read/write and committed afterwards. */
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst,          0);
        IEM_MC_ARG(uint16_t,   u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,  bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2757
2758
2759/**
2760 * @opcode 0x63
2761 *
2762 * @note This is a weird one. It works like a regular move instruction if
2763 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2764 * @todo This definitely needs a testcase to verify the odd cases. */
2765FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
2766{
2767 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */
2768
2769 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
2770 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2771
2772 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2773 {
2774 if (IEM_IS_MODRM_REG_MODE(bRm))
2775 {
2776 /*
2777 * Register to register.
2778 */
2779 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2780 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2781 IEM_MC_LOCAL(uint64_t, u64Value);
2782 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
2783 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2784 IEM_MC_ADVANCE_RIP_AND_FINISH();
2785 IEM_MC_END();
2786 }
2787 else
2788 {
2789 /*
2790 * We're loading a register from memory.
2791 */
2792 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
2793 IEM_MC_LOCAL(uint64_t, u64Value);
2794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2795 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2797 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2798 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2799 IEM_MC_ADVANCE_RIP_AND_FINISH();
2800 IEM_MC_END();
2801 }
2802 }
2803 else
2804 AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
2805}
2806
2807
2808/**
2809 * @opcode 0x64
2810 * @opmnemonic segfs
2811 * @opmincpu 80386
2812 * @opgroup og_prefixes
2813 */
2814FNIEMOP_DEF(iemOp_seg_FS)
2815{
2816 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
2817 IEMOP_HLP_MIN_386();
2818
2819 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
2820 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
2821
2822 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2823 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2824}
2825
2826
2827/**
2828 * @opcode 0x65
2829 * @opmnemonic seggs
2830 * @opmincpu 80386
2831 * @opgroup og_prefixes
2832 */
2833FNIEMOP_DEF(iemOp_seg_GS)
2834{
2835 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
2836 IEMOP_HLP_MIN_386();
2837
2838 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
2839 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
2840
2841 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2842 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2843}
2844
2845
2846/**
2847 * @opcode 0x66
2848 * @opmnemonic opsize
2849 * @openc prefix
2850 * @opmincpu 80386
2851 * @ophints harmless
2852 * @opgroup og_prefixes
2853 */
2854FNIEMOP_DEF(iemOp_op_size)
2855{
2856 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
2857 IEMOP_HLP_MIN_386();
2858
2859 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
2860 iemRecalEffOpSize(pVCpu);
2861
2862 /* For the 4 entry opcode tables, the operand prefix doesn't not count
2863 when REPZ or REPNZ are present. */
2864 if (pVCpu->iem.s.idxPrefix == 0)
2865 pVCpu->iem.s.idxPrefix = 1;
2866
2867 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2868 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2869}
2870
2871
2872/**
2873 * @opcode 0x67
2874 * @opmnemonic addrsize
2875 * @openc prefix
2876 * @opmincpu 80386
2877 * @ophints harmless
2878 * @opgroup og_prefixes
2879 */
2880FNIEMOP_DEF(iemOp_addr_size)
2881{
2882 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
2883 IEMOP_HLP_MIN_386();
2884
2885 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
2886 switch (pVCpu->iem.s.enmDefAddrMode)
2887 {
2888 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2889 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
2890 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2891 default: AssertFailed();
2892 }
2893
2894 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2895 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2896}
2897
2898
2899/**
2900 * @opcode 0x68
2901 */
2902FNIEMOP_DEF(iemOp_push_Iz)
2903{
2904 IEMOP_MNEMONIC(push_Iz, "push Iz");
2905 IEMOP_HLP_MIN_186();
2906 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2907 switch (pVCpu->iem.s.enmEffOpSize)
2908 {
2909 case IEMMODE_16BIT:
2910 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
2911 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2913 IEM_MC_PUSH_U16(u16Imm);
2914 IEM_MC_ADVANCE_RIP_AND_FINISH();
2915 IEM_MC_END();
2916 break;
2917
2918 case IEMMODE_32BIT:
2919 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2920 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2922 IEM_MC_PUSH_U32(u32Imm);
2923 IEM_MC_ADVANCE_RIP_AND_FINISH();
2924 IEM_MC_END();
2925 break;
2926
2927 case IEMMODE_64BIT:
2928 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
2929 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2931 IEM_MC_PUSH_U64(u64Imm);
2932 IEM_MC_ADVANCE_RIP_AND_FINISH();
2933 IEM_MC_END();
2934 break;
2935
2936 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2937 }
2938}
2939
2940
/**
 * @opcode 0x69
 *
 * Three-operand signed multiply: Gv = Ev * Iz.  Requires an 80186 or later.
 * SF, ZF, AF and PF are left undefined by IMUL, hence the verification-mode
 * declaration below.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            /* EFLAGS details differ between CPU vendors, so select the matching worker. */
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                /* The multiply is done in a local and only stored to Gv afterwards. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = number of immediate bytes still to come. */

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Dst,    u16Tmp,     0);
                IEM_MC_ARG_CONST(uint16_t,          u16Src,     u16Imm,     1);
                IEM_MC_ARG(uint32_t *,              pEFlags,                2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = number of immediate bytes still to come. */

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint32_t *,    pu32Dst,    u32Tmp,     0);
                IEM_MC_ARG_CONST(uint32_t,          u32Src,     u32Imm,     1);
                IEM_MC_ARG(uint32_t *,              pEFlags,                2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                /* The 64-bit immediate is the sign-extended 32-bit Iz. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = number of immediate bytes still to come. */

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();          /* parameter count for the threaded function for this block. */

                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint64_t *,    pu64Dst,    u64Tmp,     0);
                IEM_MC_ARG_CONST(uint64_t,          u64Src, /*=*/ (int64_t)(int32_t)u32Imm, 1); /* sign-extend here instead. */
                IEM_MC_ARG(uint32_t *,              pEFlags,                2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3103
3104
/**
 * @opcode 0x6a
 *
 * Push sign-extended byte immediate.  The imm8 is sign-extended to the
 * effective operand size before being pushed.  Requires an 80186 or later;
 * in 64-bit mode the operand size defaults to 64 bits.
 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U16(i8Imm); /* i8Imm sign-extends to the push width. */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U32(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U64(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3141
3142
/**
 * @opcode 0x6b
 *
 * Three-operand signed multiply with byte immediate: Gv = Ev * Ib, where the
 * imm8 is sign-extended to the effective operand size.  Requires an 80186 or
 * later.  SF, ZF, AF and PF are left undefined by IMUL.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            /* EFLAGS details differ between CPU vendors, so select the matching worker. */
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extended imm8 */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                /* The multiply is done in a local and only stored to Gv afterwards. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);

                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = number of immediate bytes still to come. */

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Dst,    u16Tmp,     0);
                IEM_MC_ARG_CONST(uint16_t,          u16Src,     u16Imm,     1);
                IEM_MC_ARG(uint32_t *,              pEFlags,                2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extended imm8 */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = number of immediate bytes still to come. */

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint32_t *,    pu32Dst,    u32Tmp,     0);
                IEM_MC_ARG_CONST(uint32_t,          u32Src,     u32Imm,     1);
                IEM_MC_ARG(uint32_t *,              pEFlags,                2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64Dst,                                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int64_t)(int8_t)u8Imm,   1); /* sign-extended imm8 */
                IEM_MC_ARG(uint32_t *,      pEFlags,                                2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = number of immediate bytes still to come. */

                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the threaded parameter count. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint64_t *,    pu64Dst,    u64Tmp,     0);
                IEM_MC_ARG_CONST(uint64_t,          u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1); /* sign-extend here instead. */
                IEM_MC_ARG(uint32_t *,              pEFlags,                2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3306
3307
/**
 * @opcode 0x6c
 *
 * INS/INSB - input byte(s) from port DX to ES:[e/r]DI.  The work is deferred
 * to a C implementation selected by REP prefix and effective address size.
 * The register bitmask passed to IEM_MC_DEFER_TO_CIMPL_1_RET tells the native
 * recompiler which guest register shadows the CImpl may modify (xDI, and xCX
 * for the REP variants).
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        /* REPNZ is treated the same as REPZ here. */
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3359
3360
/**
 * @opcode 0x6d
 *
 * INS/INSW/INSD - input word/dword(s) from port DX to ES:[e/r]DI.  Dispatches
 * on REP prefix, effective operand size and effective address size.  There is
 * no 64-bit operand variant, so IEMMODE_64BIT shares the 32-bit workers.
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* no 64-bit operand form; shares the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* no 64-bit operand form; shares the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3465
3466
/**
 * @opcode 0x6e
 *
 * OUTS/OUTSB - output byte(s) from DS:[e/r]SI (segment overridable via
 * iEffSeg) to port DX.  Deferred to a C implementation selected by REP prefix
 * and effective address size; the register bitmask tells the native
 * recompiler which guest register shadows the CImpl may modify (xSI, and xCX
 * for the REP variants).
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        /* REPNZ is treated the same as REPZ here. */
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3518
3519
/**
 * @opcode 0x6f
 *
 * OUTS/OUTSW/OUTSD - output word/dword(s) from DS:[e/r]SI (segment
 * overridable via iEffSeg) to port DX.  Dispatches on REP prefix, effective
 * operand size and effective address size.  There is no 64-bit operand
 * variant, so IEMMODE_64BIT shares the 32-bit workers.
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* no 64-bit operand form; shares the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* no 64-bit operand form; shares the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3624
3625
/**
 * @opcode 0x70
 * JO - jump short if overflow (OF=1).
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3644
3645
/**
 * @opcode 0x71
 * JNO - jump short if not overflow (OF=0); branches are inverted vs. JO.
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3664
/**
 * @opcode 0x72
 * JC/JB/JNAE - jump short if carry (CF=1).
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3683
3684
/**
 * @opcode 0x73
 * JNC/JNB/JAE - jump short if not carry (CF=0); branches are inverted vs. JC.
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3703
3704
/**
 * @opcode 0x74
 * JE/JZ - jump short if equal/zero (ZF=1).
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3723
3724
/**
 * @opcode 0x75
 * JNE/JNZ - jump short if not equal/not zero (ZF=0); inverted vs. JE.
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3743
3744
/**
 * @opcode 0x76
 * JBE/JNA - jump short if below or equal (CF=1 or ZF=1).
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3763
3764
/**
 * @opcode 0x77
 * JA/JNBE - jump short if above (CF=0 and ZF=0); inverted vs. JBE.
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3783
3784
/**
 * @opcode 0x78
 * JS - jump short if sign (SF=1).
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3803
3804
/**
 * @opcode 0x79
 * JNS - jump short if not sign (SF=0); inverted vs. JS.
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3823
3824
/**
 * @opcode 0x7a
 * JP/JPE - jump short if parity even (PF=1).
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3843
3844
/**
 * @opcode 0x7b
 * JNP/JPO - jump short if parity odd (PF=0); inverted vs. JP.
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3863
3864
/**
 * @opcode 0x7c
 * JL/JNGE - jump short if less (SF != OF).
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3883
3884
/**
 * @opcode 0x7d
 * JNL/JGE - jump short if not less (SF == OF); inverted vs. JL.
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3903
3904
/**
 * @opcode 0x7e
 * JLE/JNG - jump short if less or equal (ZF=1 or SF != OF).
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3923
3924
/**
 * @opcode 0x7f
 * JG/JNLE - jump short if greater (ZF=0 and SF == OF); inverted vs. JLE.
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3943
3944
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Eb_Ib_80.
 *
 * Handles the register target and the non-LOCK memory target.  NOTE: this
 * macro deliberately ends inside an open 'else' block (the '(void)0' tail);
 * the instruction handler must immediately follow it with a companion macro
 * (e.g. IEMOP_BODY_BINARY_Eb_Ib_LOCKED) that emits the LOCK-prefixed memory
 * variant and closes the remaining braces.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = number of immediate bytes still to come. */ \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
3995
/**
 * Locked-prefix continuation of IEMOP_BODY_BINARY_Eb_Ib_RW; supplies the
 * memory target body for LOCK-prefixed encodings and closes the braces the
 * RW macro left open.
 *
 * @param a_fnLockedU8  The locked 8-bit worker taking (pu8Dst, u8Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            /* 3rd arg = 1: one immediate byte still follows the ModR/M bytes. */ \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
4019
/**
 * Read-only variant of IEMOP_BODY_BINARY_Eb_Ib_RW, for workers that only
 * read the destination (CMP).  The memory operand is mapped RO and LOCK is
 * rejected by the paired IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK macro, which also
 * closes the braces this macro leaves open (IEMAllInstPython.py restriction).
 *
 * @param a_fnNormalU8  The 8-bit worker taking (pu8Dst, u8Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm, 1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            /* 3rd arg = 1: one immediate byte still follows the ModR/M bytes. */ \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
4066
/**
 * Continuation of IEMOP_BODY_BINARY_Eb_Ib_RO for instructions that do not
 * permit a LOCK prefix (CMP): raises \#UD and closes the open braces.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4073
4074
4075
4076/**
4077 * @opmaps grp1_80,grp1_83
4078 * @opcode /0
4079 */
4080FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
4081{
4082 IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
4083 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_add_u8);
4084 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
4085}
4086
4087
4088/**
4089 * @opmaps grp1_80,grp1_83
4090 * @opcode /1
4091 */
4092FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
4093{
4094 IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
4095 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_or_u8);
4096 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
4097}
4098
4099
4100/**
4101 * @opmaps grp1_80,grp1_83
4102 * @opcode /2
4103 */
4104FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
4105{
4106 IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
4107 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_adc_u8);
4108 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
4109}
4110
4111
4112/**
4113 * @opmaps grp1_80,grp1_83
4114 * @opcode /3
4115 */
4116FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
4117{
4118 IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
4119 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sbb_u8);
4120 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
4121}
4122
4123
4124/**
4125 * @opmaps grp1_80,grp1_83
4126 * @opcode /4
4127 */
4128FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
4129{
4130 IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
4131 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_and_u8);
4132 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
4133}
4134
4135
4136/**
4137 * @opmaps grp1_80,grp1_83
4138 * @opcode /5
4139 */
4140FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
4141{
4142 IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
4143 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sub_u8);
4144 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
4145}
4146
4147
4148/**
4149 * @opmaps grp1_80,grp1_83
4150 * @opcode /6
4151 */
4152FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
4153{
4154 IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
4155 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_xor_u8);
4156 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
4157}
4158
4159
4160/**
4161 * @opmaps grp1_80,grp1_83
4162 * @opcode /7
4163 */
4164FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
4165{
4166 IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
4167 IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
4168 IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
4169}
4170
4171
4172/**
4173 * @opcode 0x80
4174 */
4175FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
4176{
4177 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4178 switch (IEM_GET_MODRM_REG_8(bRm))
4179 {
4180 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
4181 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib, bRm);
4182 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
4183 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
4184 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
4185 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
4186 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
4187 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
4188 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4189 }
4190}
4191
4192
4193/**
4194 * Body for a group 1 binary operator.
4195 */
4196#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4197 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4198 { \
4199 /* register target */ \
4200 switch (pVCpu->iem.s.enmEffOpSize) \
4201 { \
4202 case IEMMODE_16BIT: \
4203 { \
4204 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4205 IEM_MC_BEGIN(3, 0, 0, 0); \
4206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4207 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4208 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4209 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4210 \
4211 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4212 IEM_MC_REF_EFLAGS(pEFlags); \
4213 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4214 \
4215 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4216 IEM_MC_END(); \
4217 break; \
4218 } \
4219 \
4220 case IEMMODE_32BIT: \
4221 { \
4222 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4223 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4225 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4226 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4227 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4228 \
4229 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4230 IEM_MC_REF_EFLAGS(pEFlags); \
4231 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4232 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4233 \
4234 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4235 IEM_MC_END(); \
4236 break; \
4237 } \
4238 \
4239 case IEMMODE_64BIT: \
4240 { \
4241 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4242 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4243 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4244 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4245 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4246 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4247 \
4248 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4249 IEM_MC_REF_EFLAGS(pEFlags); \
4250 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4251 \
4252 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4253 IEM_MC_END(); \
4254 break; \
4255 } \
4256 \
4257 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4258 } \
4259 } \
4260 else \
4261 { \
4262 /* memory target */ \
4263 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4264 { \
4265 switch (pVCpu->iem.s.enmEffOpSize) \
4266 { \
4267 case IEMMODE_16BIT: \
4268 { \
4269 IEM_MC_BEGIN(3, 3, 0, 0); \
4270 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4271 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4272 \
4273 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4274 IEMOP_HLP_DONE_DECODING(); \
4275 \
4276 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4277 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4278 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4279 \
4280 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4281 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4282 IEM_MC_FETCH_EFLAGS(EFlags); \
4283 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4284 \
4285 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
4286 IEM_MC_COMMIT_EFLAGS(EFlags); \
4287 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4288 IEM_MC_END(); \
4289 break; \
4290 } \
4291 \
4292 case IEMMODE_32BIT: \
4293 { \
4294 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4295 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4296 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4297 \
4298 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4299 IEMOP_HLP_DONE_DECODING(); \
4300 \
4301 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4302 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4303 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4304 \
4305 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4306 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4307 IEM_MC_FETCH_EFLAGS(EFlags); \
4308 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4309 \
4310 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
4311 IEM_MC_COMMIT_EFLAGS(EFlags); \
4312 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4313 IEM_MC_END(); \
4314 break; \
4315 } \
4316 \
4317 case IEMMODE_64BIT: \
4318 { \
4319 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4320 \
4321 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4322 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4323 \
4324 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4325 IEMOP_HLP_DONE_DECODING(); \
4326 \
4327 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4328 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4329 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4330 \
4331 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4332 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4333 IEM_MC_FETCH_EFLAGS(EFlags); \
4334 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4335 \
4336 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
4337 IEM_MC_COMMIT_EFLAGS(EFlags); \
4338 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4339 IEM_MC_END(); \
4340 break; \
4341 } \
4342 \
4343 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4344 } \
4345 } \
4346 else \
4347 { \
4348 (void)0
/* This must be a separate macro due to parsing restrictions in IEMAllInstPython.py. */
/**
 * LOCK-prefixed memory-target continuation of IEMOP_BODY_BINARY_Ev_Iz_RW;
 * also closes the braces that macro left open.  Same immediate-size handling
 * (2/4/4 bytes, sign-extended for 64-bit).
 */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,    0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src, u16Imm,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,    0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src, u32Imm,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,    0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src, u64Imm,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4433
/* read-only version */
/**
 * Read-only variant of IEMOP_BODY_BINARY_Ev_Iz_RW, for workers that only
 * read the destination (CMP).  Memory operands are mapped RO and a LOCK
 * prefix raises \#UD inline, so unlike the RW variant this macro is
 * self-contained (no paired LOCKED continuation needed).  No
 * CLEAR_HIGH_GREG in the 32-bit register case since the destination is
 * never written.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                /* Iz in 64-bit mode: 32-bit immediate sign-extended to 64 bits. */ \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst,    0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src, u16Imm,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst,    0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src, u32Imm,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst,    0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src, u64Imm,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4590
4591
4592/**
4593 * @opmaps grp1_81
4594 * @opcode /0
4595 */
4596FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
4597{
4598 IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
4599 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
4600 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
4601}
4602
4603
4604/**
4605 * @opmaps grp1_81
4606 * @opcode /1
4607 */
4608FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
4609{
4610 IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
4611 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
4612 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
4613}
4614
4615
4616/**
4617 * @opmaps grp1_81
4618 * @opcode /2
4619 */
4620FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
4621{
4622 IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
4623 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
4624 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
4625}
4626
4627
4628/**
4629 * @opmaps grp1_81
4630 * @opcode /3
4631 */
4632FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
4633{
4634 IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
4635 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
4636 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
4637}
4638
4639
4640/**
4641 * @opmaps grp1_81
4642 * @opcode /4
4643 */
4644FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
4645{
4646 IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
4647 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
4648 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
4649}
4650
4651
4652/**
4653 * @opmaps grp1_81
4654 * @opcode /5
4655 */
4656FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
4657{
4658 IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
4659 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
4660 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
4661}
4662
4663
4664/**
4665 * @opmaps grp1_81
4666 * @opcode /6
4667 */
4668FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
4669{
4670 IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
4671 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
4672 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
4673}
4674
4675
4676/**
4677 * @opmaps grp1_81
4678 * @opcode /7
4679 */
4680FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
4681{
4682 IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
4683 IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
4684}
4685
4686
4687/**
4688 * @opcode 0x81
4689 */
4690FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
4691{
4692 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4693 switch (IEM_GET_MODRM_REG_8(bRm))
4694 {
4695 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
4696 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz, bRm);
4697 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
4698 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
4699 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
4700 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
4701 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
4702 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
4703 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4704 }
4705}
4706
4707
4708/**
4709 * @opcode 0x82
4710 * @opmnemonic grp1_82
4711 * @opgroup og_groups
4712 */
4713FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
4714{
4715 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
4716 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
4717}
4718
4719
4720/**
4721 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4722 * iemOp_Grp1_Ev_Ib.
4723 */
4724#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4725 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4726 { \
4727 /* \
4728 * Register target \
4729 */ \
4730 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4731 switch (pVCpu->iem.s.enmEffOpSize) \
4732 { \
4733 case IEMMODE_16BIT: \
4734 IEM_MC_BEGIN(3, 0, 0, 0); \
4735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4736 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4737 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
4738 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4739 \
4740 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4741 IEM_MC_REF_EFLAGS(pEFlags); \
4742 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4743 \
4744 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4745 IEM_MC_END(); \
4746 break; \
4747 \
4748 case IEMMODE_32BIT: \
4749 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4751 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4752 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
4753 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4754 \
4755 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4756 IEM_MC_REF_EFLAGS(pEFlags); \
4757 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4758 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4759 \
4760 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4761 IEM_MC_END(); \
4762 break; \
4763 \
4764 case IEMMODE_64BIT: \
4765 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4767 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4768 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
4769 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4770 \
4771 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4772 IEM_MC_REF_EFLAGS(pEFlags); \
4773 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4774 \
4775 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4776 IEM_MC_END(); \
4777 break; \
4778 \
4779 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4780 } \
4781 } \
4782 else \
4783 { \
4784 /* \
4785 * Memory target. \
4786 */ \
4787 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4788 { \
4789 switch (pVCpu->iem.s.enmEffOpSize) \
4790 { \
4791 case IEMMODE_16BIT: \
4792 IEM_MC_BEGIN(3, 3, 0, 0); \
4793 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4794 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4795 \
4796 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4797 IEMOP_HLP_DONE_DECODING(); \
4798 \
4799 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4800 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4801 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4802 \
4803 IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
4804 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4805 IEM_MC_FETCH_EFLAGS(EFlags); \
4806 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4807 \
4808 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
4809 IEM_MC_COMMIT_EFLAGS(EFlags); \
4810 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4811 IEM_MC_END(); \
4812 break; \
4813 \
4814 case IEMMODE_32BIT: \
4815 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4816 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4817 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4818 \
4819 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4820 IEMOP_HLP_DONE_DECODING(); \
4821 \
4822 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4823 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4824 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4825 \
4826 IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
4827 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4828 IEM_MC_FETCH_EFLAGS(EFlags); \
4829 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4830 \
4831 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
4832 IEM_MC_COMMIT_EFLAGS(EFlags); \
4833 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4834 IEM_MC_END(); \
4835 break; \
4836 \
4837 case IEMMODE_64BIT: \
4838 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4839 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4840 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4841 \
4842 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4843 IEMOP_HLP_DONE_DECODING(); \
4844 \
4845 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4846 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4847 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4848 \
4849 IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
4850 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4851 IEM_MC_FETCH_EFLAGS(EFlags); \
4852 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4853 \
4854 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
4855 IEM_MC_COMMIT_EFLAGS(EFlags); \
4856 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4857 IEM_MC_END(); \
4858 break; \
4859 \
4860 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4861 } \
4862 } \
4863 else \
4864 { \
4865 (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * LOCK-prefixed memory-target continuation of IEMOP_BODY_BINARY_Ev_Ib_RW;
 * also closes the braces that macro left open.  The byte immediate is
 * sign-extended to the effective operand size, as in the RW variant.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,    0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src, (int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,                2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,    0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src, (int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,                2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,    0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src, (int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,                2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4944
4945/* read-only variant */
/**
 * Body macro for group-1 Ev,Ib forms that only READ the destination (i.e.
 * CMP, opcode 0x83 /7): the operand is mapped read-only and only EFLAGS are
 * committed.  The Ib immediate is sign-extended to the effective operand
 * size before the arithmetic helper is invoked.
 *
 * Expects @c bRm (the ModR/M byte) in scope.  A LOCK prefix is invalid for
 * the read-only forms and raises \#UD via IEMOP_RAISE_INVALID_LOCK_PREFIX_RET.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    /* Map read-only: the destination is only inspected, never written. */ \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* LOCK prefix on a read-only op (CMP) is invalid -> \#UD. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
5091
5092/**
5093 * @opmaps grp1_83
5094 * @opcode /0
5095 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
    /* Non-locked encodings (register target and unlocked memory target): */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
    /* LOCK-prefixed memory target falls through to the locked helpers: */
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
5102
5103
5104/**
5105 * @opmaps grp1_83
5106 * @opcode /1
5107 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
    /* Non-locked encodings (register target and unlocked memory target): */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
    /* LOCK-prefixed memory target falls through to the locked helpers: */
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
5114
5115
5116/**
5117 * @opmaps grp1_83
5118 * @opcode /2
5119 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
    /* Non-locked encodings (register target and unlocked memory target): */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    /* LOCK-prefixed memory target falls through to the locked helpers: */
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
5126
5127
5128/**
5129 * @opmaps grp1_83
5130 * @opcode /3
5131 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
    /* Non-locked encodings (register target and unlocked memory target): */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    /* LOCK-prefixed memory target falls through to the locked helpers: */
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
5138
5139
5140/**
5141 * @opmaps grp1_83
5142 * @opcode /4
5143 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
    /* Non-locked encodings (register target and unlocked memory target): */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    /* LOCK-prefixed memory target falls through to the locked helpers: */
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
5150
5151
5152/**
5153 * @opmaps grp1_83
5154 * @opcode /5
5155 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
    /* Non-locked encodings (register target and unlocked memory target): */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    /* LOCK-prefixed memory target falls through to the locked helpers: */
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
5162
5163
5164/**
5165 * @opmaps grp1_83
5166 * @opcode /6
5167 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
    /* Non-locked encodings (register target and unlocked memory target): */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    /* LOCK-prefixed memory target falls through to the locked helpers: */
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
5174
5175
5176/**
5177 * @opmaps grp1_83
5178 * @opcode /7
5179 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
    /* CMP never writes the destination, so only the read-only body is needed
       (it raises \#UD itself if a LOCK prefix is present). */
    IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
5185
5186
5187/**
5188 * @opcode 0x83
5189 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
             to the 386 even if absent in the intel reference manuals and some
             3rd party opcode listings. */
    /* The ModR/M reg field selects the group-1 operation. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5209
5210
5211/**
5212 * @opcode 0x84
5213 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    /* AF is left undefined by TEST. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* TEST only reads its operands; a LOCK prefix is rejected. */
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
5221
5222
5223/**
5224 * @opcode 0x85
5225 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    /* AF is left undefined by TEST. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Read-only body: TEST does not write the destination operand. */
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64);
}
5232
5233
5234/**
5235 * @opcode 0x86
5236 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Plain register swap via two temporaries; no flags are touched. */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(2, 4, 0, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);
        IEM_MC_LOCAL(uint8_t, uTmpReg);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP_U8_RW(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* Memory-form XCHG uses the locked helper unless the execution mode
           flags say LOCK semantics may be disregarded. */
        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
        else
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Mem, bUnmapInfo);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5287
5288
5289/**
5290 * @opcode 0x87
5291 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register-register swap via two temporaries; one body per
           effective operand size.  No flags are touched. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Memory-form XCHG uses the locked helper unless the execution mode
           flags say LOCK semantics may be disregarded (IEM_F_X86_DISREGARD_LOCK). */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 4, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_LOCAL(uint16_t, uTmpReg);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U16_RW(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 4, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_LOCAL(uint32_t, uTmpReg);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U32_RW(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 4, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_LOCAL(uint64_t, uTmpReg);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U64_RW(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5432
5433
5434/**
5435 * @opcode 0x88
5436 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* reg -> reg: copy the 8-bit general register. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5473
5474
5475/**
5476 * @opcode 0x89
5477 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* reg -> reg copy, one body per effective operand size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5571
5572
5573/**
5574 * @opcode 0x8a
5575 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* reg -> reg: copy the 8-bit general register (direction is rm -> reg here). */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5611
5612
5613/**
5614 * @opcode 0x8b
5615 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* reg -> reg copy (rm is the source here), one body per operand size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5709
5710
5711/**
5712 * opcode 0x63
5713 * @todo Table fixme
5714 */
5715FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
5716{
5717 if (!IEM_IS_64BIT_CODE(pVCpu))
5718 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
5719 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
5720 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
5721 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
5722}
5723
5724
5725/**
5726 * @opcode 0x8c
5727 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if (iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                /* ZX fetch: the selector is zero-extended into the wider register. */
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5803
5804
5805
5806
5807/**
5808 * @opcode 0x8d
5809 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */

    /* LEA stores the effective address itself, truncated to the operand
       size; no memory access is performed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            /* No truncation needed: the address is stored as-is. */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5856
5857
5858/**
5859 * @opcode 0x8e
5860 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    /** @todo r=bird: What does 8086 do here wrt CS? */
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     *
     * Note! Using IEMOP_MOV_SW_EV_REG_BODY here to specify different
     *       IEM_CIMPL_F_XXX values depending on the CPU mode and target
     *       register. This is a restriction of the current recompiler
     *       approach.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
/* Shared body: load the selector value from a GPR and hand it to the
   iemCImpl_load_SReg C implementation with the given CIMPL flags. */
#define IEMOP_MOV_SW_EV_REG_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 0, 0, a_fCImplFlags); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t, u16Value, 1); \
            IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_HINT_FLUSH_GUEST_SHADOW_SREG(iSegReg); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        if (iSegReg == X86_SREG_SS)
        {
            /* Loading SS inhibits interrupts/traps on the next instruction. */
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_REG_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_REG_BODY
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
/* Same as the register body above, but fetching the 16-bit selector from memory. */
#define IEMOP_MOV_SW_EV_MEM_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 1, 0, a_fCImplFlags); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t, u16Value, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_HINT_FLUSH_GUEST_SHADOW_SREG(iSegReg); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        if (iSegReg == X86_SREG_SS)
        {
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_MEM_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_MEM_BODY
    }
}
5965
5966
5967/** Opcode 0x8f /0.
 *
 * pop Ev - pop a word/dword/qword off the stack into the r/m operand.
 * Register destinations reuse the common POP GReg path; memory destinations
 * are deferred to the iemCImpl_pop_memXX workers because Intel documents
 * that rSP is incremented *before* the effective address calculation.
 */
5968FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
5969{
5970    /* This bugger is rather annoying as it requires rSP to be updated before
5971       doing the effective address calculations.  Will eventually require a
5972       split between the R/M+SIB decoding and the effective address
5973       calculation - which is something that is required for any attempt at
5974       reusing this code for a recompiler.  It may also be good to have if we
5975       need to delay #UD exception caused by invalid lock prefixes.
5976
5977       For now, we'll do a mostly safe interpreter-only implementation here. */
5978    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
5979     *        now until tests show it's checked.. */
5980    IEMOP_MNEMONIC(pop_Ev, "pop Ev");
5981
5982    /* Register access is relatively easy and can share code. */
5983    if (IEM_IS_MODRM_REG_MODE(bRm))
5984        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
5985
5986    /*
5987     * Memory target.
5988     *
5989     * Intel says that RSP is incremented before it's used in any effective
5990     * address calcuations.  This means some serious extra annoyance here since
5991     * we decode and calculate the effective address in one step and like to
5992     * delay committing registers till everything is done.
5993     *
5994     * So, we'll decode and calculate the effective address twice.  This will
5995     * require some recoding if turned into a recompiler.
5996     */
5997    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */
5998
5999#if 1 /* This can be compiled, optimize later if needed. */
6000    switch (pVCpu->iem.s.enmEffOpSize)
6001    {
6002        case IEMMODE_16BIT:
6003            IEM_MC_BEGIN(2, 0, 0, 0);
6004            IEM_MC_ARG(RTGCPTR,        GCPtrEffDst,                               1);
            /* 2nd byte of the 3rd param is the rSP bias applied for the EA calc
               (2/4/8 = operand size popped) - see the #else variant below. */
6005            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
6006            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6007            IEM_MC_ARG_CONST(uint8_t,  iEffSeg,     pVCpu->iem.s.iEffSeg,         0);
6008            IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(X86_GREG_xSP);
6009            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
6010            IEM_MC_END();
6011            break;
6012
6013        case IEMMODE_32BIT:
6014            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
6015            IEM_MC_ARG(RTGCPTR,        GCPtrEffDst,                               1);
6016            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
6017            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6018            IEM_MC_ARG_CONST(uint8_t,  iEffSeg,     pVCpu->iem.s.iEffSeg,         0);
6019            IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(X86_GREG_xSP);
6020            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
6021            IEM_MC_END();
6022            break;
6023
6024        case IEMMODE_64BIT:
6025            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
6026            IEM_MC_ARG(RTGCPTR,        GCPtrEffDst,                               1);
6027            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
6028            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6029            IEM_MC_ARG_CONST(uint8_t,  iEffSeg,     pVCpu->iem.s.iEffSeg,         0);
6030            IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(X86_GREG_xSP);
6031            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
6032            IEM_MC_END();
6033            break;
6034
6035        IEM_NOT_REACHED_DEFAULT_CASE_RET();
6036    }
6037
6038#else
6039# ifndef TST_IEM_CHECK_MC
6040    /* Calc effective address with modified ESP. */
6041/** @todo testcase */
6042    RTGCPTR         GCPtrEff;
6043    VBOXSTRICTRC    rcStrict;
6044    switch (pVCpu->iem.s.enmEffOpSize)
6045    {
6046        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
6047        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
6048        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
6049        IEM_NOT_REACHED_DEFAULT_CASE_RET();
6050    }
6051    if (rcStrict != VINF_SUCCESS)
6052        return rcStrict;
6053    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6054
6055    /* Perform the operation - this should be CImpl. */
6056    RTUINT64U TmpRsp;
6057    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
6058    switch (pVCpu->iem.s.enmEffOpSize)
6059    {
6060        case IEMMODE_16BIT:
6061        {
6062            uint16_t u16Value;
6063            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
6064            if (rcStrict == VINF_SUCCESS)
6065                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
6066            break;
6067        }
6068
6069        case IEMMODE_32BIT:
6070        {
6071            uint32_t u32Value;
6072            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
6073            if (rcStrict == VINF_SUCCESS)
6074                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
6075            break;
6076        }
6077
6078        case IEMMODE_64BIT:
6079        {
6080            uint64_t u64Value;
6081            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
6082            if (rcStrict == VINF_SUCCESS)
6083                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
6084            break;
6085        }
6086
6087        IEM_NOT_REACHED_DEFAULT_CASE_RET();
6088    }
6089    if (rcStrict == VINF_SUCCESS)
6090    {
            /* Only commit the new rSP once both the pop and the store succeeded. */
6091        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
6092        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
6093    }
6094    return rcStrict;
6095
6096# else
6097    return VERR_IEM_IPE_2;
6098# endif
6099#endif
6100}
6101
6102
6103/**
6104 * @opcode 0x8f
 *
 * Group 1A dispatcher: modrm.reg == 0 is pop Ev; on AMD CPUs with XOP,
 * reg values 1-7 make 0x8f the first byte of the three-byte XOP prefix
 * (layout mirrors the three-byte VEX prefix with inverted R/X/B bits).
6105 */
6106FNIEMOP_DEF(iemOp_Grp1A__xop)
6107{
6108    /*
6109     * AMD has defined /1 thru /7 as XOP prefix.  The prefix is similar to the
6110     * three byte VEX prefix, except that the mmmmm field cannot have the values
6111     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
6112     */
6113    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6114    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
6115        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
6116
6117    IEMOP_MNEMONIC(xop, "xop");
6118    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
6119    {
6120        /** @todo Test when exctly the XOP conformance checks kick in during
6121         * instruction decoding and fetching (using \#PF). */
6122        uint8_t bXop2;   IEM_OPCODE_GET_NEXT_U8(&bXop2);
6123        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* XOP cannot be combined with 66/F3/F2/LOCK/REX - those encodings are invalid. */
6124        if (   (  pVCpu->iem.s.fPrefixes
6125                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
6126            == 0)
6127        {
6128            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
6129            if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
6130                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* R, X and B are stored inverted in the encoding, hence the '~'. */
6131            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
6132            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
6133            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
6134            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
6135            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
6136            pVCpu->iem.s.idxPrefix  = bXop2 & 0x3;
6137
6138            /** @todo XOP: Just use new tables and decoders. */
6139            switch (bRm & 0x1f)
6140            {
6141                case 8: /* xop opcode map 8. */
6142                    IEMOP_BITCH_ABOUT_STUB();
6143                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6144
6145                case 9: /* xop opcode map 9. */
6146                    IEMOP_BITCH_ABOUT_STUB();
6147                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6148
6149                case 10: /* xop opcode map 10. */
6150                    IEMOP_BITCH_ABOUT_STUB();
6151                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6152
6153                default:
6154                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
6155                    IEMOP_RAISE_INVALID_OPCODE_RET();
6156            }
6157        }
6158        else
6159            Log(("XOP: Invalid prefix mix!\n"));
6160    }
6161    else
6162        Log(("XOP: XOP support disabled!\n"));
6163    IEMOP_RAISE_INVALID_OPCODE_RET();
6164}
6165
6166
6167/**
6168 * Common 'xchg reg,rAX' helper.
 *
 * Swaps the operand-size-wide general register @a iReg (REX.B applied here)
 * with rAX/eAX/ax via two fetches and two stores; no flags are touched.
6169 */
6170FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
6171{
6172    iReg |= pVCpu->iem.s.uRexB;
6173    switch (pVCpu->iem.s.enmEffOpSize)
6174    {
6175        case IEMMODE_16BIT:
6176            IEM_MC_BEGIN(0, 2, 0, 0);
6177            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6178            IEM_MC_LOCAL(uint16_t, u16Tmp1);
6179            IEM_MC_LOCAL(uint16_t, u16Tmp2);
6180            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
6181            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
6182            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
6183            IEM_MC_STORE_GREG_U16(iReg,         u16Tmp2);
6184            IEM_MC_ADVANCE_RIP_AND_FINISH();
6185            IEM_MC_END();
6186            break;
6187
6188        case IEMMODE_32BIT:
6189            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
6190            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6191            IEM_MC_LOCAL(uint32_t, u32Tmp1);
6192            IEM_MC_LOCAL(uint32_t, u32Tmp2);
6193            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
6194            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
6195            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
6196            IEM_MC_STORE_GREG_U32(iReg,         u32Tmp2);
6197            IEM_MC_ADVANCE_RIP_AND_FINISH();
6198            IEM_MC_END();
6199            break;
6200
6201        case IEMMODE_64BIT:
6202            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6203            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6204            IEM_MC_LOCAL(uint64_t, u64Tmp1);
6205            IEM_MC_LOCAL(uint64_t, u64Tmp2);
6206            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
6207            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
6208            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
6209            IEM_MC_STORE_GREG_U64(iReg,         u64Tmp2);
6210            IEM_MC_ADVANCE_RIP_AND_FINISH();
6211            IEM_MC_END();
6212            break;
6213
6214        IEM_NOT_REACHED_DEFAULT_CASE_RET();
6215    }
6216}
6217
6218
6219/**
6220 * @opcode 0x90
 *
 * Plain 0x90 is NOP.  With REX.B it becomes xchg r8,rAX; with the F3 (REPZ)
 * prefix it is PAUSE, which may need a VM-exit intercept when executing a
 * nested VMX/SVM guest.
6221 */
6222FNIEMOP_DEF(iemOp_nop)
6223{
6224    /* R8/R8D and RAX/EAX can be exchanged. */
6225    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
6226    {
6227        IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
6228        return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
6229    }
6230
    /* PAUSE is encoded as F3 90, i.e. the REPZ prefix.  (This previously
       tested IEM_OP_PRF_LOCK, which would both miss real PAUSE encodings -
       skipping the nested VMX/SVM pause intercepts below - and misreport
       'lock nop' as PAUSE.) */
6231    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6232    {
6233        IEMOP_MNEMONIC(pause, "pause");
6234        /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
6235           and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
6236        if (!IEM_IS_IN_GUEST(pVCpu))
6237        { /* probable */ }
6238#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6239        else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
6240            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmx_pause);
6241#endif
6242#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
6243        else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
6244            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_svm_pause);
6245#endif
6246    }
6247    else
6248        IEMOP_MNEMONIC(nop, "nop");
6249    /** @todo testcase: lock nop; lock pause */
6250    IEM_MC_BEGIN(0, 0, 0, 0);
6251    IEMOP_HLP_DONE_DECODING();
6252    IEM_MC_ADVANCE_RIP_AND_FINISH();
6253    IEM_MC_END();
6254}
6255
6256
6257/**
6258 * @opcode 0x91
 * xchg rCX,rAX - swap done by the common helper for the current operand size.
6259 */
6260FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
6261{
6262    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
6263    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
6264}
6265
6266
6267/**
6268 * @opcode 0x92
 * xchg rDX,rAX - swap done by the common helper for the current operand size.
6269 */
6270FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
6271{
6272    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
6273    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
6274}
6275
6276
6277/**
6278 * @opcode 0x93
 * xchg rBX,rAX - swap done by the common helper for the current operand size.
6279 */
6280FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
6281{
6282    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
6283    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
6284}
6285
6286
6287/**
6288 * @opcode 0x94
 * xchg rSP,rAX - swap done by the common helper for the current operand size.
6289 */
6290FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6291{
    /* Mnemonic typo fixed: 'rSX' is not a register - the operand is rSP,
       matching the rCX/rDX/rBX/rBP/rSI/rDI siblings. */
6292    IEMOP_MNEMONIC(xchg_rSP_rAX, "xchg rSP,rAX");
6293    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6294}
6295
6296
6297/**
6298 * @opcode 0x95
 * xchg rBP,rAX - swap done by the common helper for the current operand size.
6299 */
6300FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
6301{
6302    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
6303    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
6304}
6305
6306
6307/**
6308 * @opcode 0x96
 * xchg rSI,rAX - swap done by the common helper for the current operand size.
6309 */
6310FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
6311{
6312    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
6313    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
6314}
6315
6316
6317/**
6318 * @opcode 0x97
 * xchg rDI,rAX - swap done by the common helper for the current operand size.
6319 */
6320FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
6321{
6322    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
6323    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
6324}
6325
6326
6327/**
6328 * @opcode 0x98
 *
 * cbw/cwde/cdqe - sign-extend the lower half of rAX into the full
 * operand-size register, implemented as an OR/AND of the upper half
 * depending on the sign bit of the lower half.
6329 */
6330FNIEMOP_DEF(iemOp_cbw)
6331{
6332    switch (pVCpu->iem.s.enmEffOpSize)
6333    {
6334        case IEMMODE_16BIT:
6335            IEMOP_MNEMONIC(cbw, "cbw");
6336            IEM_MC_BEGIN(0, 1, 0, 0);
6337            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Bit 7 = sign of AL: set AH to 0xff or 0x00. */
6338            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
6339                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
6340            } IEM_MC_ELSE() {
6341                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
6342            } IEM_MC_ENDIF();
6343            IEM_MC_ADVANCE_RIP_AND_FINISH();
6344            IEM_MC_END();
6345            break;
6346
6347        case IEMMODE_32BIT:
6348            IEMOP_MNEMONIC(cwde, "cwde");
6349            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6350            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6351            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6352                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
6353            } IEM_MC_ELSE() {
6354                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
6355            } IEM_MC_ENDIF();
6356            IEM_MC_ADVANCE_RIP_AND_FINISH();
6357            IEM_MC_END();
6358            break;
6359
6360        case IEMMODE_64BIT:
6361            IEMOP_MNEMONIC(cdqe, "cdqe");
6362            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6363            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6364            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6365                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
6366            } IEM_MC_ELSE() {
6367                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
6368            } IEM_MC_ENDIF();
6369            IEM_MC_ADVANCE_RIP_AND_FINISH();
6370            IEM_MC_END();
6371            break;
6372
6373        IEM_NOT_REACHED_DEFAULT_CASE_RET();
6374    }
6375}
6376
6377
6378/**
6379 * @opcode 0x99
6380 */
6381FNIEMOP_DEF(iemOp_cwd)
6382{
6383 switch (pVCpu->iem.s.enmEffOpSize)
6384 {
6385 case IEMMODE_16BIT:
6386 IEMOP_MNEMONIC(cwd, "cwd");
6387 IEM_MC_BEGIN(0, 1, 0, 0);
6388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6389 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6390 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
6391 } IEM_MC_ELSE() {
6392 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
6393 } IEM_MC_ENDIF();
6394 IEM_MC_ADVANCE_RIP_AND_FINISH();
6395 IEM_MC_END();
6396 break;
6397
6398 case IEMMODE_32BIT:
6399 IEMOP_MNEMONIC(cdq, "cdq");
6400 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6402 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6403 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
6404 } IEM_MC_ELSE() {
6405 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
6406 } IEM_MC_ENDIF();
6407 IEM_MC_ADVANCE_RIP_AND_FINISH();
6408 IEM_MC_END();
6409 break;
6410
6411 case IEMMODE_64BIT:
6412 IEMOP_MNEMONIC(cqo, "cqo");
6413 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6415 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
6416 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
6417 } IEM_MC_ELSE() {
6418 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
6419 } IEM_MC_ENDIF();
6420 IEM_MC_ADVANCE_RIP_AND_FINISH();
6421 IEM_MC_END();
6422 break;
6423
6424 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6425 }
6426}
6427
6428
6429/**
6430 * @opcode 0x9a
 *
 * call Ap - direct far call; invalid in 64-bit mode.  The immediate encodes
 * the offset first (16 or 32 bits by operand size) followed by the selector.
6431 */
6432FNIEMOP_DEF(iemOp_call_Ap)
6433{
6434    IEMOP_MNEMONIC(call_Ap, "call Ap");
6435    IEMOP_HLP_NO_64BIT();
6436
6437    /* Decode the far pointer address and pass it on to the far call C implementation. */
6438    uint32_t off32Seg;
6439    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
6440        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
6441    else
6442        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
6443    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
6444    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* UINT64_MAX flush mask: a far call can change just about any guest state. */
6445    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
6446                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
6447                                iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
6448    /** @todo make task-switches, ring-switches, ++ return non-zero status */
6449}
6450
6451
6452/** Opcode 0x9b. (aka fwait)
 * Checks for pending x87 exceptions / CR0.TS+MP before continuing;
 * otherwise a no-op. */
6453FNIEMOP_DEF(iemOp_wait)
6454{
6455    IEMOP_MNEMONIC(wait, "wait");
6456    IEM_MC_BEGIN(0, 0, 0, 0);
6457    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6458    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
6459    IEM_MC_MAYBE_RAISE_FPU_XCPT();
6460    IEM_MC_ADVANCE_RIP_AND_FINISH();
6461    IEM_MC_END();
6462}
6463
6464
6465/**
6466 * @opcode 0x9c
 *
 * pushf Fv - deferred to the CImpl worker; flushes the rSP shadow since the
 * stack pointer is modified.
6467 */
6468FNIEMOP_DEF(iemOp_pushf_Fv)
6469{
6470    IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
6471    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6472    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6473    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
6474                                iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
6475}
6476
6477
6478/**
6479 * @opcode 0x9d
 *
 * popf Fv - deferred to the CImpl worker; modifies rSP and RFLAGS, and may
 * unmask interrupts (hence the check-IRQ-before-and-after flag).
6480 */
6481FNIEMOP_DEF(iemOp_popf_Fv)
6482{
6483    IEMOP_MNEMONIC(popf_Fv, "popf Fv");
6484    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6485    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6486    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
6487                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
6488                                iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
6489}
6490
6491
6492/**
6493 * @opcode 0x9e
 *
 * sahf - load SF/ZF/AF/PF/CF from AH into the low EFLAGS byte, forcing the
 * reserved always-one bit.  In 64-bit mode this requires the LAHF/SAHF
 * CPUID feature.
6494 */
6495FNIEMOP_DEF(iemOp_sahf)
6496{
6497    IEMOP_MNEMONIC(sahf, "sahf");
6498    if (   IEM_IS_64BIT_CODE(pVCpu)
6499        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6500        IEMOP_RAISE_INVALID_OPCODE_RET();
6501    IEM_MC_BEGIN(0, 2, 0, 0);
6502    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6503    IEM_MC_LOCAL(uint32_t, u32Flags);
6504    IEM_MC_LOCAL(uint32_t, EFlags);
6505    IEM_MC_FETCH_EFLAGS(EFlags);
    /* Register 4 as an 8-bit operand without REX is AH. */
6506    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
6507    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6508    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
6509    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
6510    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
6511    IEM_MC_COMMIT_EFLAGS(EFlags);
6512    IEM_MC_ADVANCE_RIP_AND_FINISH();
6513    IEM_MC_END();
6514}
6515
6516
6517/**
6518 * @opcode 0x9f
 *
 * lahf - store the low EFLAGS byte into AH.  In 64-bit mode this requires
 * the LAHF/SAHF CPUID feature.
6519 */
6520FNIEMOP_DEF(iemOp_lahf)
6521{
6522    IEMOP_MNEMONIC(lahf, "lahf");
6523    if (   IEM_IS_64BIT_CODE(pVCpu)
6524        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6525        IEMOP_RAISE_INVALID_OPCODE_RET();
6526    IEM_MC_BEGIN(0, 1, 0, 0);
6527    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6528    IEM_MC_LOCAL(uint8_t, u8Flags);
6529    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    /* Register 4 as an 8-bit operand without REX is AH. */
6530    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
6531    IEM_MC_ADVANCE_RIP_AND_FINISH();
6532    IEM_MC_END();
6533}
6534
6535
6536/**
6537 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
6538 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
6539 * Will return/throw on failures.
 * The immediate width follows the effective address mode (16/32/64 bits),
 * zero extended to 64 bits.
6540 * @param a_GCPtrMemOff The variable to store the offset in.
6541 */
6542#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
6543    do \
6544    { \
6545        switch (pVCpu->iem.s.enmEffAddrMode) \
6546        { \
6547            case IEMMODE_16BIT: \
6548                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
6549                break; \
6550            case IEMMODE_32BIT: \
6551                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
6552                break; \
6553            case IEMMODE_64BIT: \
6554                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
6555                break; \
6556            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
6557        } \
6558    } while (0)
6559
6560/**
6561 * @opcode 0xa0
 * mov AL,Ob - load AL from the moffs8 address in the effective segment.
6562 */
6563FNIEMOP_DEF(iemOp_mov_AL_Ob)
6564{
6565    /*
6566     * Get the offset.
6567     */
6568    IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
6569    RTGCPTR GCPtrMemOff;
6570    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6571
6572    /*
6573     * Fetch AL.
6574     */
6575    IEM_MC_BEGIN(0, 1, 0, 0);
6576    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6577    IEM_MC_LOCAL(uint8_t, u8Tmp);
6578    IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6579    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
6580    IEM_MC_ADVANCE_RIP_AND_FINISH();
6581    IEM_MC_END();
6582}
6583
6584
6585/**
6586 * @opcode 0xa1
 * mov rAX,Ov - load rAX/eAX/ax from the moffs address in the effective segment.
6587 */
6588FNIEMOP_DEF(iemOp_mov_rAX_Ov)
6589{
6590    /*
6591     * Get the offset.
6592     */
6593    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
6594    RTGCPTR GCPtrMemOff;
6595    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6596
6597    /*
6598     * Fetch rAX.
6599     */
6600    switch (pVCpu->iem.s.enmEffOpSize)
6601    {
6602        case IEMMODE_16BIT:
6603            IEM_MC_BEGIN(0, 1, 0, 0);
6604            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6605            IEM_MC_LOCAL(uint16_t, u16Tmp);
6606            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6607            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
6608            IEM_MC_ADVANCE_RIP_AND_FINISH();
6609            IEM_MC_END();
6610            break;
6611
6612        case IEMMODE_32BIT:
6613            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6614            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6615            IEM_MC_LOCAL(uint32_t, u32Tmp);
6616            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6617            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
6618            IEM_MC_ADVANCE_RIP_AND_FINISH();
6619            IEM_MC_END();
6620            break;
6621
6622        case IEMMODE_64BIT:
6623            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6624            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6625            IEM_MC_LOCAL(uint64_t, u64Tmp);
6626            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6627            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
6628            IEM_MC_ADVANCE_RIP_AND_FINISH();
6629            IEM_MC_END();
6630            break;
6631
6632        IEM_NOT_REACHED_DEFAULT_CASE_RET();
6633    }
6634}
6635
6636
6637/**
6638 * @opcode 0xa2
 * mov Ob,AL - store AL to the moffs8 address in the effective segment.
6639 */
6640FNIEMOP_DEF(iemOp_mov_Ob_AL)
6641{
6642    /*
6643     * Get the offset.
6644     */
6645    IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
6646    RTGCPTR GCPtrMemOff;
6647    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6648
6649    /*
6650     * Store AL.
6651     */
6652    IEM_MC_BEGIN(0, 1, 0, 0);
6653    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6654    IEM_MC_LOCAL(uint8_t, u8Tmp);
6655    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
6656    IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
6657    IEM_MC_ADVANCE_RIP_AND_FINISH();
6658    IEM_MC_END();
6659}
6660
6661
6662/**
6663 * @opcode 0xa3
 * mov Ov,rAX - store rAX/eAX/ax to the moffs address in the effective segment.
6664 */
6665FNIEMOP_DEF(iemOp_mov_Ov_rAX)
6666{
6667    /*
6668     * Get the offset.
6669     */
6670    IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
6671    RTGCPTR GCPtrMemOff;
6672    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6673
6674    /*
6675     * Store rAX.
6676     */
6677    switch (pVCpu->iem.s.enmEffOpSize)
6678    {
6679        case IEMMODE_16BIT:
6680            IEM_MC_BEGIN(0, 1, 0, 0);
6681            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6682            IEM_MC_LOCAL(uint16_t, u16Tmp);
6683            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
6684            IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
6685            IEM_MC_ADVANCE_RIP_AND_FINISH();
6686            IEM_MC_END();
6687            break;
6688
6689        case IEMMODE_32BIT:
6690            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6691            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6692            IEM_MC_LOCAL(uint32_t, u32Tmp);
6693            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
6694            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
6695            IEM_MC_ADVANCE_RIP_AND_FINISH();
6696            IEM_MC_END();
6697            break;
6698
6699        case IEMMODE_64BIT:
6700            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6701            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6702            IEM_MC_LOCAL(uint64_t, u64Tmp);
6703            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
6704            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
6705            IEM_MC_ADVANCE_RIP_AND_FINISH();
6706            IEM_MC_END();
6707            break;
6708
6709        IEM_NOT_REACHED_DEFAULT_CASE_RET();
6710    }
6711}
6712
6713/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * One non-rep MOVS step: load ValBits from iEffSeg:xSI, store to ES:xDI,
 * then advance (or retreat, if EFLAGS.DF) both index registers by
 * ValBits/8 using AddrBits-wide arithmetic. */
6714#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
6715    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
6716    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
6717    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
6718    IEM_MC_LOCAL(RTGCPTR,           uAddr); \
6719    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
6720    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
6721    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
6722    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
6723    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
6724        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
6725        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
6726    } IEM_MC_ELSE() { \
6727        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
6728        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
6729    } IEM_MC_ENDIF(); \
6730    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6731    IEM_MC_END() \
6732
6733/**
6734 * @opcode 0xa4
 *
 * movsb Xb,Yb - byte string move.  REP/REPNE variants are deferred to the
 * CImpl workers (flushing the xSI/xDI/xCX shadows); the single-step form
 * uses the shared IEM_MOVS_CASE body.
6735 */
6736FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
6737{
6738    /*
6739     * Use the C implementation if a repeat prefix is encountered.
6740     */
6741    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6742    {
6743        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
6744        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6745        switch (pVCpu->iem.s.enmEffAddrMode)
6746        {
6747            case IEMMODE_16BIT:
6748                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6749                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6750                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6751                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6752                                            iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
6753            case IEMMODE_32BIT:
6754                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6755                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6756                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6757                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6758                                            iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
6759            case IEMMODE_64BIT:
6760                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6761                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6762                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6763                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6764                                            iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
6765            IEM_NOT_REACHED_DEFAULT_CASE_RET();
6766        }
6767    }
6768
6769    /*
6770     * Sharing case implementation with movs[wdq] below.
6771     */
6772    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
6773    switch (pVCpu->iem.s.enmEffAddrMode)
6774    {
6775        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
6776        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386);   break;
6777        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT);     break;
6778        IEM_NOT_REACHED_DEFAULT_CASE_RET();
6779    }
6780}
6781
6782
6783/**
6784 * @opcode 0xa5
 *
 * movsw/movsd/movsq Xv,Yv - word/dword/qword string move.  REP variants are
 * deferred to per-opsize/addrsize CImpl workers (flushing the xSI/xDI/xCX
 * shadows); the single-step forms use the shared IEM_MOVS_CASE body.
6785 */
6786FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
6787{
6788
6789    /*
6790     * Use the C implementation if a repeat prefix is encountered.
6791     */
6792    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6793    {
6794        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
6795        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6796        switch (pVCpu->iem.s.enmEffOpSize)
6797        {
6798            case IEMMODE_16BIT:
6799                switch (pVCpu->iem.s.enmEffAddrMode)
6800                {
6801                    case IEMMODE_16BIT:
6802                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6803                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6804                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6805                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6806                                                    iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
6807                    case IEMMODE_32BIT:
6808                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6809                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6810                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6811                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6812                                                    iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
6813                    case IEMMODE_64BIT:
6814                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6815                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6816                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6817                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6818                                                    iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
6819                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
6820                }
6821                break;
6822            case IEMMODE_32BIT:
6823                switch (pVCpu->iem.s.enmEffAddrMode)
6824                {
6825                    case IEMMODE_16BIT:
6826                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6827                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6828                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6829                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6830                                                    iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
6831                    case IEMMODE_32BIT:
6832                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6833                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6834                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6835                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6836                                                    iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
6837                    case IEMMODE_64BIT:
6838                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6839                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6840                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6841                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6842                                                    iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
6843                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
6844                }
                /* NOTE(review): no 'break' here unlike the op16 case above; the
                   fall-through edge appears unreachable since every inner case
                   ends in a *_RET macro - confirm and add 'break' for tidiness. */
6845            case IEMMODE_64BIT:
6846                switch (pVCpu->iem.s.enmEffAddrMode)
6847                {
6848                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
6849                    case IEMMODE_32BIT:
6850                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6851                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6852                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6853                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6854                                                    iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
6855                    case IEMMODE_64BIT:
6856                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6857                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6858                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6859                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6860                                                    iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
6861                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
6862                }
6863            IEM_NOT_REACHED_DEFAULT_CASE_RET();
6864        }
6865    }
6866
6867    /*
6868     * Annoying double switch here.
6869     * Using ugly macro for implementing the cases, sharing it with movsb.
6870     */
6871    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
6872    switch (pVCpu->iem.s.enmEffOpSize)
6873    {
6874        case IEMMODE_16BIT:
6875            switch (pVCpu->iem.s.enmEffAddrMode)
6876            {
6877                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
6878                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386);   break;
6879                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT);     break;
6880                IEM_NOT_REACHED_DEFAULT_CASE_RET();
6881            }
6882            break;
6883
6884        case IEMMODE_32BIT:
6885            switch (pVCpu->iem.s.enmEffAddrMode)
6886            {
6887                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
6888                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386);   break;
6889                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT);     break;
6890                IEM_NOT_REACHED_DEFAULT_CASE_RET();
6891            }
6892            break;
6893
6894        case IEMMODE_64BIT:
6895            switch (pVCpu->iem.s.enmEffAddrMode)
6896            {
6897                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6898                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
6899                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
6900                IEM_NOT_REACHED_DEFAULT_CASE_RET();
6901            }
6902            break;
6903        IEM_NOT_REACHED_DEFAULT_CASE_RET();
6904    }
6905}
6906
6907#undef IEM_MOVS_CASE
6908
6909/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 * One non-rep CMPS step: compare ValBits at iEffSeg:xSI against ES:xDI via
 * the cmp AImpl worker (updating EFLAGS only), then advance (or retreat, if
 * EFLAGS.DF) both index registers by ValBits/8 using AddrBits-wide
 * arithmetic. */
6910#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
6911    IEM_MC_BEGIN(3, 3, a_fMcFlags, 0); \
6912    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
6913    IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
6914    IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
6915    IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
6916    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
6917    IEM_MC_LOCAL(RTGCPTR,           uAddr); \
6918    \
6919    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
6920    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
6921    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
6922    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
6923    IEM_MC_REF_LOCAL(puValue1, uValue1); \
6924    IEM_MC_REF_EFLAGS(pEFlags); \
6925    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
6926    \
6927    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
6928        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
6929        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
6930    } IEM_MC_ELSE() { \
6931        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
6932        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
6933    } IEM_MC_ENDIF(); \
6934    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6935    IEM_MC_END() \
6936
/**
 * @opcode 0xa6
 *
 * CMPSB - compares the byte at [xSI] in the effective segment with the byte
 * at ES:[xDI], updating only the status flags and stepping xSI/xDI by one
 * according to EFLAGS.DF.  The REPE/REPNE prefixed forms are deferred to the
 * C implementation, one variant per effective address size.
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Flush mask: the rep-cmps C impl may modify xSI, xDI and the xCX
           counter; EFLAGS is covered by IEM_CIMPL_F_STATUS_FLAGS.  Each case
           returns via the defer macro, so no break statements are needed. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7013
7014
7015/**
7016 * @opcode 0xa7
7017 */
7018FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
7019{
7020 /*
7021 * Use the C implementation if a repeat prefix is encountered.
7022 */
7023 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7024 {
7025 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
7026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7027 switch (pVCpu->iem.s.enmEffOpSize)
7028 {
7029 case IEMMODE_16BIT:
7030 switch (pVCpu->iem.s.enmEffAddrMode)
7031 {
7032 case IEMMODE_16BIT:
7033 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7034 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7035 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7036 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7037 iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7038 case IEMMODE_32BIT:
7039 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7040 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7041 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7042 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7043 iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7044 case IEMMODE_64BIT:
7045 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7046 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7047 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7048 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7049 iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7050 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7051 }
7052 break;
7053 case IEMMODE_32BIT:
7054 switch (pVCpu->iem.s.enmEffAddrMode)
7055 {
7056 case IEMMODE_16BIT:
7057 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7058 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7059 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7060 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7061 iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7062 case IEMMODE_32BIT:
7063 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7064 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7065 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7066 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7067 iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7068 case IEMMODE_64BIT:
7069 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7070 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7071 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7072 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7073 iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7074 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7075 }
7076 case IEMMODE_64BIT:
7077 switch (pVCpu->iem.s.enmEffAddrMode)
7078 {
7079 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
7080 case IEMMODE_32BIT:
7081 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7082 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7083 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7084 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7085 iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7086 case IEMMODE_64BIT:
7087 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7088 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7089 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7090 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7091 iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7092 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7093 }
7094 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7095 }
7096 }
7097
7098 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7099 {
7100 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
7101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7102 switch (pVCpu->iem.s.enmEffOpSize)
7103 {
7104 case IEMMODE_16BIT:
7105 switch (pVCpu->iem.s.enmEffAddrMode)
7106 {
7107 case IEMMODE_16BIT:
7108 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7109 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7110 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7111 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7112 iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7113 case IEMMODE_32BIT:
7114 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7115 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7116 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7117 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7118 iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7119 case IEMMODE_64BIT:
7120 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7121 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7122 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7123 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7124 iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7125 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7126 }
7127 break;
7128 case IEMMODE_32BIT:
7129 switch (pVCpu->iem.s.enmEffAddrMode)
7130 {
7131 case IEMMODE_16BIT:
7132 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7133 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7134 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7135 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7136 iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7137 case IEMMODE_32BIT:
7138 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7139 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7140 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7141 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7142 iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7143 case IEMMODE_64BIT:
7144 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7145 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7146 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7147 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7148 iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7149 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7150 }
7151 case IEMMODE_64BIT:
7152 switch (pVCpu->iem.s.enmEffAddrMode)
7153 {
7154 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
7155 case IEMMODE_32BIT:
7156 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7157 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7158 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7159 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7160 iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7161 case IEMMODE_64BIT:
7162 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7163 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7164 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7165 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7166 iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7167 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7168 }
7169 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7170 }
7171 }
7172
7173 /*
7174 * Annoying double switch here.
7175 * Using ugly macro for implementing the cases, sharing it with cmpsb.
7176 */
7177 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
7178 switch (pVCpu->iem.s.enmEffOpSize)
7179 {
7180 case IEMMODE_16BIT:
7181 switch (pVCpu->iem.s.enmEffAddrMode)
7182 {
7183 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7184 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7185 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
7186 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7187 }
7188 break;
7189
7190 case IEMMODE_32BIT:
7191 switch (pVCpu->iem.s.enmEffAddrMode)
7192 {
7193 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7194 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7195 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
7196 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7197 }
7198 break;
7199
7200 case IEMMODE_64BIT:
7201 switch (pVCpu->iem.s.enmEffAddrMode)
7202 {
7203 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7204 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break;
7205 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
7206 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7207 }
7208 break;
7209 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7210 }
7211}
7212
7213#undef IEM_CMPS_CASE
7214
/**
 * @opcode 0xa8
 *
 * TEST AL,Ib - ANDs AL with the immediate byte, updating only the status
 * flags (the result is discarded).  AF is undefined per the Intel SDM, hence
 * the verification exemption.
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
}
7224
7225
/**
 * @opcode 0xa9
 *
 * TEST rAX,Iz - ANDs AX/EAX/RAX with the operand-sized immediate, updating
 * only the status flags (the result is discarded).  AF is undefined per the
 * Intel SDM.  The trailing 0 selects no immediate modification (in contrast
 * to e.g. AND which shares this body macro).
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
}
7235
7236
/**
 * Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX for the
 * non-repeating STOS forms.
 *
 * Stores the operand-sized low part of rAX to ES:[xDI] (no segment override
 * possible for the destination of string stores), then advances or retreats
 * xDI by the operand size as directed by EFLAGS.DF.  EFLAGS is not modified.
 *
 * @param   ValBits     Operand size in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address size in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags for IEM_MC_BEGIN.
 */
#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7253
/**
 * @opcode 0xaa
 *
 * STOSB - stores AL at ES:[xDI], stepping xDI by one according to EFLAGS.DF.
 * The REP form (REPNZ is treated like REP here, as the prefix has no
 * distinct meaning for STOS) is deferred to the C implementation.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Flush mask: the rep-stos C impl modifies xDI and the xCX counter. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m16);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m32);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7299
7300
7301/**
7302 * @opcode 0xab
7303 */
7304FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
7305{
7306 /*
7307 * Use the C implementation if a repeat prefix is encountered.
7308 */
7309 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7310 {
7311 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
7312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7313 switch (pVCpu->iem.s.enmEffOpSize)
7314 {
7315 case IEMMODE_16BIT:
7316 switch (pVCpu->iem.s.enmEffAddrMode)
7317 {
7318 case IEMMODE_16BIT:
7319 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7320 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7321 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7322 iemCImpl_stos_ax_m16);
7323 case IEMMODE_32BIT:
7324 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7325 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7326 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7327 iemCImpl_stos_ax_m32);
7328 case IEMMODE_64BIT:
7329 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7330 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7331 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7332 iemCImpl_stos_ax_m64);
7333 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7334 }
7335 break;
7336 case IEMMODE_32BIT:
7337 switch (pVCpu->iem.s.enmEffAddrMode)
7338 {
7339 case IEMMODE_16BIT:
7340 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7341 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7342 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7343 iemCImpl_stos_eax_m16);
7344 case IEMMODE_32BIT:
7345 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7346 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7347 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7348 iemCImpl_stos_eax_m32);
7349 case IEMMODE_64BIT:
7350 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7351 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7352 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7353 iemCImpl_stos_eax_m64);
7354 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7355 }
7356 case IEMMODE_64BIT:
7357 switch (pVCpu->iem.s.enmEffAddrMode)
7358 {
7359 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7360 case IEMMODE_32BIT:
7361 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7362 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7363 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7364 iemCImpl_stos_rax_m32);
7365 case IEMMODE_64BIT:
7366 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7367 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7368 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7369 iemCImpl_stos_rax_m64);
7370 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7371 }
7372 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7373 }
7374 }
7375
7376 /*
7377 * Annoying double switch here.
7378 * Using ugly macro for implementing the cases, sharing it with stosb.
7379 */
7380 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7381 switch (pVCpu->iem.s.enmEffOpSize)
7382 {
7383 case IEMMODE_16BIT:
7384 switch (pVCpu->iem.s.enmEffAddrMode)
7385 {
7386 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7387 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7388 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
7389 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7390 }
7391 break;
7392
7393 case IEMMODE_32BIT:
7394 switch (pVCpu->iem.s.enmEffAddrMode)
7395 {
7396 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7397 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7398 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
7399 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7400 }
7401 break;
7402
7403 case IEMMODE_64BIT:
7404 switch (pVCpu->iem.s.enmEffAddrMode)
7405 {
7406 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7407 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
7408 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
7409 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7410 }
7411 break;
7412 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7413 }
7414}
7415
7416#undef IEM_STOS_CASE
7417
/**
 * Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv for the
 * non-repeating LODS forms.
 *
 * Loads the operand-sized value at [xSI] in the effective segment (segment
 * override applies) into the low part of rAX, then advances or retreats xSI
 * by the operand size as directed by EFLAGS.DF.  EFLAGS is not modified.
 *
 * @param   ValBits     Operand size in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address size in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags for IEM_MC_BEGIN.
 */
#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7434
/**
 * @opcode 0xac
 *
 * LODSB - loads the byte at [xSI] in the effective segment into AL, stepping
 * xSI by one according to EFLAGS.DF.  The REP form (REPNZ treated like REP)
 * is deferred to the C implementation.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Flush mask: the rep-lods C impl modifies xAX (the loaded value),
           xSI and the xCX counter. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7483
7484
/**
 * @opcode 0xad
 *
 * LODSW/LODSD/LODSQ - loads the operand-sized value at [xSI] in the
 * effective segment into the low part of rAX, stepping xSI by the operand
 * size according to EFLAGS.DF.  The REP forms (REPNZ treated like REP) are
 * deferred to the C implementation.  The 64-bit operand with 16-bit
 * addressing combination cannot be encoded.
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Flush mask: the rep-lods C impl modifies xAX, xSI and the xCX
           counter.  All inner cases return, so no break is needed between
           the nested switches. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7607
7608#undef IEM_LODS_CASE
7609
/**
 * Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv for the
 * non-repeating SCAS forms.
 *
 * Compares the operand-sized low part of rAX with the value at ES:[xDI]
 * (no segment override possible) via iemAImpl_cmp_uNN (only EFLAGS is
 * modified), then advances or retreats xDI by the operand size as directed
 * by EFLAGS.DF.  Note that xSI is not used by SCAS at all.
 *
 * @param   ValBits     Operand size in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address size in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags for IEM_MC_BEGIN.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
7632
7633/**
7634 * @opcode 0xae
7635 */
7636FNIEMOP_DEF(iemOp_scasb_AL_Xb)
7637{
7638 /*
7639 * Use the C implementation if a repeat prefix is encountered.
7640 */
7641 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7642 {
7643 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
7644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7645 switch (pVCpu->iem.s.enmEffAddrMode)
7646 {
7647 case IEMMODE_16BIT:
7648 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7649 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7650 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7651 iemCImpl_repe_scas_al_m16);
7652 case IEMMODE_32BIT:
7653 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7654 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7655 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7656 iemCImpl_repe_scas_al_m32);
7657 case IEMMODE_64BIT:
7658 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7659 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7660 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7661 iemCImpl_repe_scas_al_m64);
7662 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7663 }
7664 }
7665 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7666 {
7667 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
7668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7669 switch (pVCpu->iem.s.enmEffAddrMode)
7670 {
7671 case IEMMODE_16BIT:
7672 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7673 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7674 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7675 iemCImpl_repne_scas_al_m16);
7676 case IEMMODE_32BIT:
7677 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7678 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7679 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7680 iemCImpl_repne_scas_al_m32);
7681 case IEMMODE_64BIT:
7682 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7683 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7684 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7685 iemCImpl_repne_scas_al_m64);
7686 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7687 }
7688 }
7689
7690 /*
7691 * Sharing case implementation with stos[wdq] below.
7692 */
7693 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
7694 switch (pVCpu->iem.s.enmEffAddrMode)
7695 {
7696 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7697 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7698 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
7699 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7700 }
7701}
7702
7703
7704/**
7705 * @opcode 0xaf
7706 */
7707FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
7708{
7709 /*
7710 * Use the C implementation if a repeat prefix is encountered.
7711 */
7712 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7713 {
7714 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
7715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7716 switch (pVCpu->iem.s.enmEffOpSize)
7717 {
7718 case IEMMODE_16BIT:
7719 switch (pVCpu->iem.s.enmEffAddrMode)
7720 {
7721 case IEMMODE_16BIT:
7722 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7723 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7724 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7725 iemCImpl_repe_scas_ax_m16);
7726 case IEMMODE_32BIT:
7727 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7728 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7729 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7730 iemCImpl_repe_scas_ax_m32);
7731 case IEMMODE_64BIT:
7732 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7733 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7734 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7735 iemCImpl_repe_scas_ax_m64);
7736 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7737 }
7738 break;
7739 case IEMMODE_32BIT:
7740 switch (pVCpu->iem.s.enmEffAddrMode)
7741 {
7742 case IEMMODE_16BIT:
7743 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7744 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7745 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7746 iemCImpl_repe_scas_eax_m16);
7747 case IEMMODE_32BIT:
7748 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7749 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7750 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7751 iemCImpl_repe_scas_eax_m32);
7752 case IEMMODE_64BIT:
7753 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7754 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7755 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7756 iemCImpl_repe_scas_eax_m64);
7757 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7758 }
7759 case IEMMODE_64BIT:
7760 switch (pVCpu->iem.s.enmEffAddrMode)
7761 {
7762 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
7763 case IEMMODE_32BIT:
7764 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7765 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7766 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7767 iemCImpl_repe_scas_rax_m32);
7768 case IEMMODE_64BIT:
7769 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7770 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7771 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7772 iemCImpl_repe_scas_rax_m64);
7773 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7774 }
7775 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7776 }
7777 }
7778 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7779 {
7780 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
7781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7782 switch (pVCpu->iem.s.enmEffOpSize)
7783 {
7784 case IEMMODE_16BIT:
7785 switch (pVCpu->iem.s.enmEffAddrMode)
7786 {
7787 case IEMMODE_16BIT:
7788 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7789 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7790 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7791 iemCImpl_repne_scas_ax_m16);
7792 case IEMMODE_32BIT:
7793 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7794 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7795 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7796 iemCImpl_repne_scas_ax_m32);
7797 case IEMMODE_64BIT:
7798 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7799 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7800 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7801 iemCImpl_repne_scas_ax_m64);
7802 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7803 }
7804 break;
7805 case IEMMODE_32BIT:
7806 switch (pVCpu->iem.s.enmEffAddrMode)
7807 {
7808 case IEMMODE_16BIT:
7809 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7810 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7811 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7812 iemCImpl_repne_scas_eax_m16);
7813 case IEMMODE_32BIT:
7814 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7815 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7816 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7817 iemCImpl_repne_scas_eax_m32);
7818 case IEMMODE_64BIT:
7819 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7820 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7821 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7822 iemCImpl_repne_scas_eax_m64);
7823 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7824 }
7825 case IEMMODE_64BIT:
7826 switch (pVCpu->iem.s.enmEffAddrMode)
7827 {
7828 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
7829 case IEMMODE_32BIT:
7830 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7831 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7832 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7833 iemCImpl_repne_scas_rax_m32);
7834 case IEMMODE_64BIT:
7835 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7836 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7837 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7838 iemCImpl_repne_scas_rax_m64);
7839 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7840 }
7841 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7842 }
7843 }
7844
7845 /*
7846 * Annoying double switch here.
7847 * Using ugly macro for implementing the cases, sharing it with scasb.
7848 */
7849 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
7850 switch (pVCpu->iem.s.enmEffOpSize)
7851 {
7852 case IEMMODE_16BIT:
7853 switch (pVCpu->iem.s.enmEffAddrMode)
7854 {
7855 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7856 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7857 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
7858 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7859 }
7860 break;
7861
7862 case IEMMODE_32BIT:
7863 switch (pVCpu->iem.s.enmEffAddrMode)
7864 {
7865 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7866 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7867 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
7868 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7869 }
7870 break;
7871
7872 case IEMMODE_64BIT:
7873 switch (pVCpu->iem.s.enmEffAddrMode)
7874 {
7875 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7876 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
7877 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
7878 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7879 }
7880 break;
7881 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7882 }
7883}
7884
7885#undef IEM_SCAS_CASE
7886
7887/**
7888 * Common 'mov r8, imm8' helper.
7889 */
7890FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
7891{
7892 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7893 IEM_MC_BEGIN(0, 0, 0, 0);
7894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7895 IEM_MC_STORE_GREG_U8_CONST(iFixedReg, u8Imm);
7896 IEM_MC_ADVANCE_RIP_AND_FINISH();
7897 IEM_MC_END();
7898}
7899
7900
7901/**
7902 * @opcode 0xb0
7903 */
7904FNIEMOP_DEF(iemOp_mov_AL_Ib)
7905{
7906 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
7907 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7908}
7909
7910
7911/**
7912 * @opcode 0xb1
7913 */
7914FNIEMOP_DEF(iemOp_CL_Ib)
7915{
7916 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
7917 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7918}
7919
7920
7921/**
7922 * @opcode 0xb2
7923 */
7924FNIEMOP_DEF(iemOp_DL_Ib)
7925{
7926 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
7927 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7928}
7929
7930
7931/**
7932 * @opcode 0xb3
7933 */
7934FNIEMOP_DEF(iemOp_BL_Ib)
7935{
7936 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
7937 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7938}
7939
7940
7941/**
7942 * @opcode 0xb4
7943 */
7944FNIEMOP_DEF(iemOp_mov_AH_Ib)
7945{
7946 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
7947 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7948}
7949
7950
7951/**
7952 * @opcode 0xb5
7953 */
7954FNIEMOP_DEF(iemOp_CH_Ib)
7955{
7956 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
7957 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7958}
7959
7960
7961/**
7962 * @opcode 0xb6
7963 */
7964FNIEMOP_DEF(iemOp_DH_Ib)
7965{
7966 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
7967 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7968}
7969
7970
7971/**
7972 * @opcode 0xb7
7973 */
7974FNIEMOP_DEF(iemOp_BH_Ib)
7975{
7976 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
7977 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7978}
7979
7980
7981/**
7982 * Common 'mov regX,immX' helper.
7983 */
7984FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
7985{
7986 switch (pVCpu->iem.s.enmEffOpSize)
7987 {
7988 case IEMMODE_16BIT:
7989 IEM_MC_BEGIN(0, 0, 0, 0);
7990 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7992 IEM_MC_STORE_GREG_U16_CONST(iFixedReg, u16Imm);
7993 IEM_MC_ADVANCE_RIP_AND_FINISH();
7994 IEM_MC_END();
7995 break;
7996
7997 case IEMMODE_32BIT:
7998 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7999 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8001 IEM_MC_STORE_GREG_U32_CONST(iFixedReg, u32Imm);
8002 IEM_MC_ADVANCE_RIP_AND_FINISH();
8003 IEM_MC_END();
8004 break;
8005
8006 case IEMMODE_64BIT:
8007 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
8008 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
8009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8010 IEM_MC_STORE_GREG_U64_CONST(iFixedReg, u64Imm);
8011 IEM_MC_ADVANCE_RIP_AND_FINISH();
8012 IEM_MC_END();
8013 break;
8014 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8015 }
8016}
8017
8018
8019/**
8020 * @opcode 0xb8
8021 */
8022FNIEMOP_DEF(iemOp_eAX_Iv)
8023{
8024 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
8025 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8026}
8027
8028
8029/**
8030 * @opcode 0xb9
8031 */
8032FNIEMOP_DEF(iemOp_eCX_Iv)
8033{
8034 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
8035 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8036}
8037
8038
8039/**
8040 * @opcode 0xba
8041 */
8042FNIEMOP_DEF(iemOp_eDX_Iv)
8043{
8044 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
8045 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8046}
8047
8048
8049/**
8050 * @opcode 0xbb
8051 */
8052FNIEMOP_DEF(iemOp_eBX_Iv)
8053{
8054 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
8055 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8056}
8057
8058
8059/**
8060 * @opcode 0xbc
8061 */
8062FNIEMOP_DEF(iemOp_eSP_Iv)
8063{
8064 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
8065 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8066}
8067
8068
8069/**
8070 * @opcode 0xbd
8071 */
8072FNIEMOP_DEF(iemOp_eBP_Iv)
8073{
8074 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
8075 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8076}
8077
8078
8079/**
8080 * @opcode 0xbe
8081 */
8082FNIEMOP_DEF(iemOp_eSI_Iv)
8083{
8084 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
8085 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8086}
8087
8088
8089/**
8090 * @opcode 0xbf
8091 */
8092FNIEMOP_DEF(iemOp_eDI_Iv)
8093{
8094 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
8095 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8096}
8097
8098
8099/**
8100 * @opcode 0xc0
8101 */
8102FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
8103{
8104 IEMOP_HLP_MIN_186();
8105 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8106 PCIEMOPSHIFTSIZES pImpl;
8107 switch (IEM_GET_MODRM_REG_8(bRm))
8108 {
8109 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
8110 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
8111 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
8112 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
8113 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
8114 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
8115 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
8116 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8117 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8118 }
8119 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8120
8121 if (IEM_IS_MODRM_REG_MODE(bRm))
8122 {
8123 /* register */
8124 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8125 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0);
8126 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8127 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8128 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8129 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8130 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8131 IEM_MC_REF_EFLAGS(pEFlags);
8132 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8133 IEM_MC_ADVANCE_RIP_AND_FINISH();
8134 IEM_MC_END();
8135 }
8136 else
8137 {
8138 /* memory */
8139 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_186, 0);
8140 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8141 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8142
8143 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8144 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8145
8146 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8147 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8148 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8149
8150 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8151 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8152 IEM_MC_FETCH_EFLAGS(EFlags);
8153 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8154
8155 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
8156 IEM_MC_COMMIT_EFLAGS(EFlags);
8157 IEM_MC_ADVANCE_RIP_AND_FINISH();
8158 IEM_MC_END();
8159 }
8160}
8161
8162
8163/**
8164 * @opcode 0xc1
8165 */
8166FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
8167{
8168 IEMOP_HLP_MIN_186();
8169 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8170 PCIEMOPSHIFTSIZES pImpl;
8171 switch (IEM_GET_MODRM_REG_8(bRm))
8172 {
8173 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
8174 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
8175 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
8176 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
8177 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
8178 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
8179 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
8180 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8181 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8182 }
8183 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8184
8185 if (IEM_IS_MODRM_REG_MODE(bRm))
8186 {
8187 /* register */
8188 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8189 switch (pVCpu->iem.s.enmEffOpSize)
8190 {
8191 case IEMMODE_16BIT:
8192 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0);
8193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8194 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8195 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8196 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8197 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8198 IEM_MC_REF_EFLAGS(pEFlags);
8199 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8200 IEM_MC_ADVANCE_RIP_AND_FINISH();
8201 IEM_MC_END();
8202 break;
8203
8204 case IEMMODE_32BIT:
8205 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
8206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8207 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8208 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8209 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8210 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8211 IEM_MC_REF_EFLAGS(pEFlags);
8212 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8213 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
8214 IEM_MC_ADVANCE_RIP_AND_FINISH();
8215 IEM_MC_END();
8216 break;
8217
8218 case IEMMODE_64BIT:
8219 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
8220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8221 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8222 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8223 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8224 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8225 IEM_MC_REF_EFLAGS(pEFlags);
8226 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8227 IEM_MC_ADVANCE_RIP_AND_FINISH();
8228 IEM_MC_END();
8229 break;
8230
8231 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8232 }
8233 }
8234 else
8235 {
8236 /* memory */
8237 switch (pVCpu->iem.s.enmEffOpSize)
8238 {
8239 case IEMMODE_16BIT:
8240 IEM_MC_BEGIN(3, 3, 0, 0);
8241 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8242 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8243
8244 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8246
8247 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8248 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8249 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8250
8251 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8252 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8253 IEM_MC_FETCH_EFLAGS(EFlags);
8254 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8255
8256 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
8257 IEM_MC_COMMIT_EFLAGS(EFlags);
8258 IEM_MC_ADVANCE_RIP_AND_FINISH();
8259 IEM_MC_END();
8260 break;
8261
8262 case IEMMODE_32BIT:
8263 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
8264 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8265 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8266
8267 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8269
8270 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8271 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8272 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8273
8274 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8275 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8276 IEM_MC_FETCH_EFLAGS(EFlags);
8277 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8278
8279 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
8280 IEM_MC_COMMIT_EFLAGS(EFlags);
8281 IEM_MC_ADVANCE_RIP_AND_FINISH();
8282 IEM_MC_END();
8283 break;
8284
8285 case IEMMODE_64BIT:
8286 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
8287 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8288 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8289
8290 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8292
8293 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8294 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8295 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8296
8297 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8298 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8299 IEM_MC_FETCH_EFLAGS(EFlags);
8300 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8301
8302 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
8303 IEM_MC_COMMIT_EFLAGS(EFlags);
8304 IEM_MC_ADVANCE_RIP_AND_FINISH();
8305 IEM_MC_END();
8306 break;
8307
8308 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8309 }
8310 }
8311}
8312
8313
8314/**
8315 * @opcode 0xc2
8316 */
8317FNIEMOP_DEF(iemOp_retn_Iw)
8318{
8319 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
8320 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8321 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8323 switch (pVCpu->iem.s.enmEffOpSize)
8324 {
8325 case IEMMODE_16BIT:
8326 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_iw_16, u16Imm);
8327 case IEMMODE_32BIT:
8328 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_iw_32, u16Imm);
8329 case IEMMODE_64BIT:
8330 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_iw_64, u16Imm);
8331 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8332 }
8333}
8334
8335
8336/**
8337 * @opcode 0xc3
8338 */
8339FNIEMOP_DEF(iemOp_retn)
8340{
8341 IEMOP_MNEMONIC(retn, "retn");
8342 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8344 switch (pVCpu->iem.s.enmEffOpSize)
8345 {
8346 case IEMMODE_16BIT:
8347 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_16);
8348 case IEMMODE_32BIT:
8349 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_32);
8350 case IEMMODE_64BIT:
8351 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_64);
8352 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8353 }
8354}
8355
8356
8357/**
8358 * @opcode 0xc4
8359 */
8360FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
8361{
8362 /* The LDS instruction is invalid 64-bit mode. In legacy and
8363 compatability mode it is invalid with MOD=3.
8364 The use as a VEX prefix is made possible by assigning the inverted
8365 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
8366 outside of 64-bit mode. VEX is not available in real or v86 mode. */
8367 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8368 if ( IEM_IS_64BIT_CODE(pVCpu)
8369 || IEM_IS_MODRM_REG_MODE(bRm) )
8370 {
8371 IEMOP_MNEMONIC(vex3_prefix, "vex3");
8372 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
8373 {
8374 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
8375 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
8376 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
8377 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
8378 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
8379 if ((bVex2 & 0x80 /* VEX.W */) && IEM_IS_64BIT_CODE(pVCpu))
8380 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
8381 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
8382 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
8383 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
8384 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
8385 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
8386 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
8387
8388 switch (bRm & 0x1f)
8389 {
8390 case 1: /* 0x0f lead opcode byte. */
8391#ifdef IEM_WITH_VEX
8392 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8393#else
8394 IEMOP_BITCH_ABOUT_STUB();
8395 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8396#endif
8397
8398 case 2: /* 0x0f 0x38 lead opcode bytes. */
8399#ifdef IEM_WITH_VEX
8400 return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8401#else
8402 IEMOP_BITCH_ABOUT_STUB();
8403 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8404#endif
8405
8406 case 3: /* 0x0f 0x3a lead opcode bytes. */
8407#ifdef IEM_WITH_VEX
8408 return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8409#else
8410 IEMOP_BITCH_ABOUT_STUB();
8411 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8412#endif
8413
8414 default:
8415 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
8416 IEMOP_RAISE_INVALID_OPCODE_RET();
8417 }
8418 }
8419 Log(("VEX3: VEX support disabled!\n"));
8420 IEMOP_RAISE_INVALID_OPCODE_RET();
8421 }
8422
8423 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
8424 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
8425}
8426
8427
8428/**
8429 * @opcode 0xc5
8430 */
8431FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
8432{
8433 /* The LES instruction is invalid 64-bit mode. In legacy and
8434 compatability mode it is invalid with MOD=3.
8435 The use as a VEX prefix is made possible by assigning the inverted
8436 REX.R to the top MOD bit, and the top bit in the inverted register
8437 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
8438 to accessing registers 0..7 in this VEX form. */
8439 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8440 if ( IEM_IS_64BIT_CODE(pVCpu)
8441 || IEM_IS_MODRM_REG_MODE(bRm))
8442 {
8443 IEMOP_MNEMONIC(vex2_prefix, "vex2");
8444 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
8445 {
8446 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
8447 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
8448 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
8449 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
8450 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
8451 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
8452 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
8453 pVCpu->iem.s.idxPrefix = bRm & 0x3;
8454
8455#ifdef IEM_WITH_VEX
8456 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8457#else
8458 IEMOP_BITCH_ABOUT_STUB();
8459 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8460#endif
8461 }
8462
8463 /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
8464 Log(("VEX2: VEX support disabled!\n"));
8465 IEMOP_RAISE_INVALID_OPCODE_RET();
8466 }
8467
8468 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
8469 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
8470}
8471
8472
8473/**
8474 * @opcode 0xc6
8475 */
8476FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
8477{
8478 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8479 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
8480 IEMOP_RAISE_INVALID_OPCODE_RET();
8481 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
8482
8483 if (IEM_IS_MODRM_REG_MODE(bRm))
8484 {
8485 /* register access */
8486 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8487 IEM_MC_BEGIN(0, 0, 0, 0);
8488 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8489 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
8490 IEM_MC_ADVANCE_RIP_AND_FINISH();
8491 IEM_MC_END();
8492 }
8493 else
8494 {
8495 /* memory access. */
8496 IEM_MC_BEGIN(0, 1, 0, 0);
8497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8499 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8501 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
8502 IEM_MC_ADVANCE_RIP_AND_FINISH();
8503 IEM_MC_END();
8504 }
8505}
8506
8507
8508/**
8509 * @opcode 0xc7
8510 */
8511FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
8512{
8513 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8514 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Iz in this group. */
8515 IEMOP_RAISE_INVALID_OPCODE_RET();
8516 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
8517
8518 if (IEM_IS_MODRM_REG_MODE(bRm))
8519 {
8520 /* register access */
8521 switch (pVCpu->iem.s.enmEffOpSize)
8522 {
8523 case IEMMODE_16BIT:
8524 IEM_MC_BEGIN(0, 0, 0, 0);
8525 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8527 IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
8528 IEM_MC_ADVANCE_RIP_AND_FINISH();
8529 IEM_MC_END();
8530 break;
8531
8532 case IEMMODE_32BIT:
8533 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8534 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8536 IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
8537 IEM_MC_ADVANCE_RIP_AND_FINISH();
8538 IEM_MC_END();
8539 break;
8540
8541 case IEMMODE_64BIT:
8542 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
8543 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8545 IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
8546 IEM_MC_ADVANCE_RIP_AND_FINISH();
8547 IEM_MC_END();
8548 break;
8549
8550 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8551 }
8552 }
8553 else
8554 {
8555 /* memory access. */
8556 switch (pVCpu->iem.s.enmEffOpSize)
8557 {
8558 case IEMMODE_16BIT:
8559 IEM_MC_BEGIN(0, 1, 0, 0);
8560 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8561 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
8562 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8564 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
8565 IEM_MC_ADVANCE_RIP_AND_FINISH();
8566 IEM_MC_END();
8567 break;
8568
8569 case IEMMODE_32BIT:
8570 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8571 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8572 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8573 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8575 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
8576 IEM_MC_ADVANCE_RIP_AND_FINISH();
8577 IEM_MC_END();
8578 break;
8579
8580 case IEMMODE_64BIT:
8581 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
8582 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8583 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8584 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8586 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
8587 IEM_MC_ADVANCE_RIP_AND_FINISH();
8588 IEM_MC_END();
8589 break;
8590
8591 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8592 }
8593 }
8594}
8595
8596
8597
8598
8599/**
8600 * @opcode 0xc8
8601 */
8602FNIEMOP_DEF(iemOp_enter_Iw_Ib)
8603{
8604 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
8605 IEMOP_HLP_MIN_186();
8606 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8607 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
8608 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
8609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8610 IEM_MC_DEFER_TO_CIMPL_3_RET(0,
8611 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8612 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
8613 iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
8614}
8615
8616
8617/**
8618 * @opcode 0xc9
8619 */
8620FNIEMOP_DEF(iemOp_leave)
8621{
8622 IEMOP_MNEMONIC(leave, "leave");
8623 IEMOP_HLP_MIN_186();
8624 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8626 IEM_MC_DEFER_TO_CIMPL_1_RET(0,
8627 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8628 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
8629 iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
8630}
8631
8632
8633/**
8634 * @opcode 0xca
8635 */
8636FNIEMOP_DEF(iemOp_retf_Iw)
8637{
8638 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
8639 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8641 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
8642 | IEM_CIMPL_F_MODE,
8643 RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
8644 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
8645 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8646 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
8647 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
8648 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
8649 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8650 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
8651 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
8652 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
8653 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8654 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS),
8655 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
8656}
8657
8658
8659/**
8660 * @opcode 0xcb
8661 */
8662FNIEMOP_DEF(iemOp_retf)
8663{
8664 IEMOP_MNEMONIC(retf, "retf");
8665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8666 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
8667 | IEM_CIMPL_F_MODE,
8668 RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
8669 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
8670 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8671 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
8672 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
8673 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
8674 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8675 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
8676 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
8677 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
8678 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8679 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS),
8680 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
8681}
8682
8683
8684/**
8685 * @opcode 0xcc
8686 */
8687FNIEMOP_DEF(iemOp_int3)
8688{
8689 IEMOP_MNEMONIC(int3, "int3");
8690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8691 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8692 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
8693 iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
8694}
8695
8696
8697/**
8698 * @opcode 0xcd
8699 */
8700FNIEMOP_DEF(iemOp_int_Ib)
8701{
8702 IEMOP_MNEMONIC(int_Ib, "int Ib");
8703 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
8704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8705 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8706 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS, UINT64_MAX,
8707 iemCImpl_int, u8Int, IEMINT_INTN);
8708 /** @todo make task-switches, ring-switches, ++ return non-zero status */
8709}
8710
8711
8712/**
8713 * @opcode 0xce
8714 */
8715FNIEMOP_DEF(iemOp_into)
8716{
8717 IEMOP_MNEMONIC(into, "into");
8718 IEMOP_HLP_NO_64BIT();
8719 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8720 | IEM_CIMPL_F_BRANCH_CONDITIONAL | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
8721 UINT64_MAX,
8722 iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
8723 /** @todo make task-switches, ring-switches, ++ return non-zero status */
8724}
8725
8726
/**
 * @opcode 0xcf
 *
 * IRET - interrupt return, deferred to iemCImpl_iret with the effective
 * operand size.  Checks for pending IRQs before executing
 * (IEM_CIMPL_F_CHECK_IRQ_BEFORE) and may cause a VM exit.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Flush the shadow copies of all DS/ES/FS/GS selector, base and limit
       registers — see the note at the bottom for why. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
                                  RT_BIT_64(kIemNativeGstReg_SegSelFirst   + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst  + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst   + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst  + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst   + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst  + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst   + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst  + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS),
                                iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
    /* Segment registers are sanitized when returning to an outer ring, or fully
       reloaded when returning to v86 mode. Thus the large flush list above. */
}
8752
8753
/**
 * @opcode 0xd0
 *
 * Group 2 rotate/shift with byte operand and an implicit count of 1:
 * rol/ror/rcl/rcr/shl/shr/sar Eb,1.  /6 is an invalid encoding.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Select the worker table by the ModR/M reg field. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are left undefined by (some of) these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEM_MC_BEGIN(3, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the byte RW, run the worker, then commit data + eflags. */
        IEM_MC_BEGIN(3, 3, 0, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,   bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8811
8812
8813
/**
 * @opcode 0xd1
 *
 * Group 2 rotate/shift with word/dword/qword operand and an implicit count
 * of 1: rol/ror/rcl/rcr/shl/shr/sar Ev,1.  /6 is an invalid encoding.
 * Dispatches on the effective operand size for both register and memory
 * destination forms.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Select the worker table by the ModR/M reg field. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are left undefined by (some of) these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); /* 32-bit ops zero the upper half. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map RW, run the worker, commit data + eflags. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8952
8953
/**
 * @opcode 0xd2
 *
 * Group 2 rotate/shift with byte operand and the count taken from CL:
 * rol/ror/rcl/rcr/shl/shr/sar Eb,CL.  /6 is an invalid encoding.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Select the worker table by the ModR/M reg field. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF and AF are left undefined by (some of) these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register: fetch the count from CL before invoking the worker. */
        IEM_MC_BEGIN(3, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,   pu8Dst,    0);
        IEM_MC_ARG(uint8_t,     cShiftArg, 1);
        IEM_MC_ARG(uint32_t *,  pEFlags,   2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the byte RW, run the worker, commit data + eflags. */
        IEM_MC_BEGIN(3, 3, 0, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,   bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9013
9014
/**
 * @opcode 0xd3
 *
 * Group 2 rotate/shift with word/dword/qword operand and the count taken
 * from CL: rol/ror/rcl/rcr/shl/shr/sar Ev,CL.  /6 is an invalid encoding.
 * Dispatches on the effective operand size for both register and memory
 * destination forms.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Select the worker table by the ModR/M reg field. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are left undefined by (some of) these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register: fetch the count from CL before invoking the worker. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,  pu16Dst,   0);
                IEM_MC_ARG(uint8_t,     cShiftArg, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,  pu32Dst,   0);
                IEM_MC_ARG(uint8_t,     cShiftArg, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); /* 32-bit ops zero the upper half. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,  pu64Dst,   0);
                IEM_MC_ARG(uint8_t,     cShiftArg, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map RW, run the worker, commit data + eflags. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9159
/**
 * @opcode 0xd4
 *
 * AAM imm8 - ASCII adjust AX after multiply; invalid in 64-bit mode.
 * An immediate of zero raises \#DE at decode time.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        IEMOP_RAISE_DIVIDE_ERROR_RET(); /* AAM divides by the immediate. */
    /* Only xAX is modified, so only its shadow needs flushing. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aam, bImm);
}
9173
9174
/**
 * @opcode 0xd5
 *
 * AAD imm8 - ASCII adjust AX before division; invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    /* Only xAX is modified, so only its shadow needs flushing. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aad, bImm);
}
9186
9187
/**
 * @opcode 0xd6
 *
 * SALC (undocumented) - set AL from carry: AL = CF ? 0xff : 0x00.
 * Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9206
9207
/**
 * @opcode 0xd7
 *
 * XLAT - AL = [eff-seg : rBX + zero-extended AL], one variant per effective
 * address size (the address wraps at the address-size width for 16/32-bit;
 * the IEM_MC_FETCH_MEM16_U8/MEM32_U8 forms handle that).
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9258
9259
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * If either register is empty, the worker is skipped and a stack underflow is
 * recorded against ST0 instead.
 *
 * @param bRm Mod R/M byte (rm field selects STn).
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9289
9290
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags.
 *
 * Only the FSW produced by the worker is committed; no value is stored.
 * If either register is empty, a stack underflow (no destination register,
 * hence UINT8_MAX) is recorded instead.
 *
 * @param bRm Mod R/M byte (rm field selects STn).
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,          u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw,   u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9320
9321
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping when done.
 *
 * Same as iemOpHlpFpuNoStore_st0_stN except the stack is popped after the
 * FSW update (and on the underflow path as well).
 *
 * @param bRm Mod R/M byte (rm field selects STn).
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,          u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw,   u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9351
9352
/** Opcode 0xd8 11/0.  fadd st0,stN — defers to the common st0/stN worker. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
9359
9360
/** Opcode 0xd8 11/1.  fmul st0,stN — defers to the common st0/stN worker. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
9367
9368
/** Opcode 0xd8 11/2.  fcom st0,stN — flags only, no result stored. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
9375
9376
/** Opcode 0xd8 11/3.  fcomp st0,stN — same worker as fcom but pops when done. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
9383
9384
/** Opcode 0xd8 11/4.  fsub st0,stN — defers to the common st0/stN worker. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
9391
9392
/** Opcode 0xd8 11/5.  fsubr st0,stN — reversed subtraction via the common worker. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
9399
9400
/** Opcode 0xd8 11/6.  fdiv st0,stN — defers to the common st0/stN worker. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
9407
9408
/** Opcode 0xd8 11/7.  fdivr st0,stN — reversed division via the common worker. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
9415
9416
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * Fetches the 32-bit real operand from memory first, then runs the worker if
 * ST0 is not empty; otherwise records a stack underflow against ST0.
 *
 * @param bRm Mod R/M byte (memory operand).
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9452
9453
/** Opcode 0xd8 !11/0.  fadd st0,m32r — defers to the common st0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
9460
9461
/** Opcode 0xd8 !11/1.  fmul st0,m32r — defers to the common st0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
9468
9469
/** Opcode 0xd8 !11/2.
 * fcom st0,m32r — compare ST0 with a 32-bit real from memory; only the FSW
 * is updated, and the memory operand address is recorded for FDP/FDS. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9501
9502
/** Opcode 0xd8 !11/3.
 * fcomp st0,m32r — same as fcom st0,m32r but pops ST0 after the FSW update
 * (the _THEN_POP macro variants). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9534
9535
/** Opcode 0xd8 !11/4.  fsub st0,m32r — defers to the common st0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
9542
9543
/** Opcode 0xd8 !11/5.  fsubr st0,m32r — reversed subtraction via the common worker. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
9550
9551
/** Opcode 0xd8 !11/6.  fdiv st0,m32r — defers to the common st0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
9558
9559
/** Opcode 0xd8 !11/7.  fdivr st0,m32r — reversed division via the common worker. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
9566
9567
/**
 * @opcode 0xd8
 *
 * First x87 escape byte.  Records the FPU opcode (bRm + low 3 bits of the
 * escape byte) for FOP reporting, then dispatches on the ModR/M reg field:
 * register forms (mod==3) operate on ST0/STn, memory forms on ST0/m32r.
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9607
9608
/** Opcode 0xd9 /0 mem32real
 * FLD m32r: fetches a 32-bit real from memory, converts it to r80 and pushes
 * it onto the FPU stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) becomes the new TOP after the push; it must
       be empty or we have a stack overflow instead. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9639
9640
/** Opcode 0xd9 !11/2 mem32real
 * FST m32r: stores ST(0) to memory as a 32-bit real (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so #PF and friends are raised
       before any FPU state is modified. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: stack underflow; with IM masked a negative QNaN is
           stored, otherwise nothing is written. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9674
9675
/** Opcode 0xd9 !11/3
 * FSTP m32r: stores ST(0) to memory as a 32-bit real, then pops the stack.
 * Identical to iemOp_fst_m32r except for the THEN_POP variants at the end. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Underflow: store negative QNaN if IM is masked, then pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9709
9710
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte: loads the FPU environment from memory; the actual work
 * is deferred to the iemCImpl_fldenv C implementation. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    /* The operand size selects the 14-byte vs. 28-byte environment layout. */
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
9728
9729
9730/** Opcode 0xd9 !11/5 */
9731FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
9732{
9733 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
9734 IEM_MC_BEGIN(1, 1, 0, 0);
9735 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9737
9738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9739 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9740 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9741
9742 IEM_MC_ARG(uint16_t, u16Fsw, 0);
9743 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9744
9745 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, iemCImpl_fldcw, u16Fsw);
9746 IEM_MC_END();
9747}
9748
9749
/** Opcode 0xd9 !11/6
 * FNSTENV m14/m28byte (no-wait form): stores the FPU environment to memory
 * via the iemCImpl_fnstenv C implementation. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

    /* Operand size selects the 14-byte vs. 28-byte environment layout. */
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
9767
9768
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte (no-wait form): stores the current FPU control word to a
 * 16-bit memory location. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9785
9786
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP: FPU no-operation; still raises #NM/#MF like other x87 instructions
 * and updates FOP/FPUIP. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     * intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9802
9803
/** Opcode 0xd9 11/0 stN
 * FLD ST(i): pushes a copy of ST(i) onto the FPU stack; raises stack
 * underflow-on-push if ST(i) is empty. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        /* Wrap the source value in a result with a zero FSW and push it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9828
9829
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i): exchanges ST(0) and ST(i); the underflow path (either register
 * empty) is handled by the iemCImpl_fxch_underflow C helper. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        /* Swap: ST(i) gets the old ST(0), and ST(0) gets the old ST(i) via
           the result store; C1 is set in the process. */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9858
9859
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i): copies ST(0) to ST(i) and pops.  The iDstReg==0 case (which
 * effectively just frees ST(0)) gets a dedicated, cheaper MC block. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            /* No value movement needed; just pop with a zero FSW update. */
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            /* Copy ST(0) into ST(iDstReg) and pop. */
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9906
9907
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * Raises stack underflow (against ST0) if ST0 is empty.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9935
9936
/** Opcode 0xd9 0xe0.
 * FCHS: flips the sign of ST(0). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
9943
9944
/** Opcode 0xd9 0xe1.
 * FABS: clears the sign of ST(0). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
9951
9952
/** Opcode 0xd9 0xe4.
 * FTST: compares ST(0) against 0.0, setting only the FSW condition codes. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9976
9977
/** Opcode 0xd9 0xe5.
 * FXAM: classifies ST(0) into the FSW condition codes.  Unlike most x87 ops
 * this references the register unconditionally - the assembly helper also
 * classifies the empty-register case. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9998
9999
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * Checks that the register becoming the new TOP (relative reg 7) is empty;
 * otherwise raises stack push overflow.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEM_MC_BEGIN(1, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10025
10026
/** Opcode 0xd9 0xe8.
 * FLD1: pushes +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
10033
10034
/** Opcode 0xd9 0xe9.
 * FLDL2T: pushes log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
10041
10042
/** Opcode 0xd9 0xea.
 * FLDL2E: pushes log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
10049
/** Opcode 0xd9 0xeb.
 * FLDPI: pushes pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
10056
10057
/** Opcode 0xd9 0xec.
 * FLDLG2: pushes log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
10064
/** Opcode 0xd9 0xed.
 * FLDLN2: pushes ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
10071
10072
/** Opcode 0xd9 0xee.
 * FLDZ: pushes +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
10079
10080
/** Opcode 0xd9 0xf0.
 *
 * F2XM1: replaces ST(0) with 2^ST(0) - 1.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
10094
10095
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * Raises stack underflow against ST(bRm&7) and pops if either register is
 * empty.
 *
 * @param bRm Mod R/M byte.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10126
10127
/** Opcode 0xd9 0xf1.
 * FYL2X: ST(1) = ST(1) * log2(ST(0)), then pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
10134
10135
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * Raises stack push-underflow if ST0 is empty.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10163
10164
/** Opcode 0xd9 0xf2.
 * FPTAN: replaces ST(0) with its tangent and pushes 1.0. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
10171
10172
/** Opcode 0xd9 0xf3.
 * FPATAN: ST(1) = arctan(ST(1)/ST(0)), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
10179
10180
/** Opcode 0xd9 0xf4.
 * FXTRACT: splits ST(0) into exponent (replaces ST(0)) and significand (pushed). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
10187
10188
/** Opcode 0xd9 0xf5.
 * FPREM1: IEEE partial remainder of ST(0) / ST(1), result in ST(0). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
10195
10196
/** Opcode 0xd9 0xf6.
 * FDECSTP: decrements the FPU stack TOP pointer (no data moved). */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10217
10218
/** Opcode 0xd9 0xf7.
 * FINCSTP: increments the FPU stack TOP pointer (no data moved). */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10239
10240
/** Opcode 0xd9 0xf8.
 * FPREM: (truncating) partial remainder of ST(0) / ST(1), result in ST(0). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
10247
10248
/** Opcode 0xd9 0xf9.
 * FYL2XP1: ST(1) = ST(1) * log2(ST(0) + 1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
10255
10256
/** Opcode 0xd9 0xfa.
 * FSQRT: replaces ST(0) with its square root. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
10263
10264
/** Opcode 0xd9 0xfb.
 * FSINCOS: replaces ST(0) with its sine and pushes the cosine. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
10271
10272
/** Opcode 0xd9 0xfc.
 * FRNDINT: rounds ST(0) to an integer per the FCW rounding control. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
10279
10280
/** Opcode 0xd9 0xfd.
 * FSCALE: scales ST(0) by 2^trunc(ST(1)). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
10287
10288
/** Opcode 0xd9 0xfe.
 * FSIN: replaces ST(0) with its sine. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
10295
10296
/** Opcode 0xd9 0xff.
 * FCOS: replaces ST(0) with its cosine. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
10303
10304
/** Used by iemOp_EscF1.
 * Dispatch table for the 0xd9 register-form sub-opcodes 0xe0 thru 0xff,
 * indexed by (opcode byte - 0xe0).  Reserved encodings map to iemOp_Invalid. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
10341
10342
/**
 * @opcode 0xd9
 *
 * Second x87 escape byte.  Register forms with reg=4..7 dispatch through the
 * g_apfnEscF1_E0toFF table; memory forms cover fld/fst/fstp m32r and the
 * environment/control-word instructions.
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word for FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                /* Only 0xd0 (FNOP) is valid in this group. */
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* reg=4..7 means bRm is 0xe0..0xff; index the dispatch table. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10387
10388
/** Opcode 0xda 11/0.
 * FCMOVB ST(0),ST(i): copies ST(i) to ST(0) if CF is set (below). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; only ST(i) is referenced. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10413
10414
/** Opcode 0xda 11/1.
 * FCMOVE ST(0),ST(i): copies ST(i) to ST(0) if ZF is set (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10439
10440
/** Opcode 0xda 11/2.
 * FCMOVBE ST(0),ST(i): copies ST(i) to ST(0) if CF or ZF is set (below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10465
10466
/** Opcode 0xda 11/3.
 * FCMOVU ST(0),ST(i): copies ST(i) to ST(0) if PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10491
10492
/**
 * Common worker for FPU instructions working on ST0 and ST1, only affecting
 * flags, and popping twice when done.
 *
 * Raises stack underflow and pops twice if either register is empty.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10522
10523
/** Opcode 0xda 0xe9.
 * FUCOMPP: unordered compare of ST(0) and ST(1), then pop both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
10530
10531
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * Raises stack underflow (against ST0) if ST0 is empty; the memory operand is
 * always fetched first, so memory faults take precedence.
 *
 * @param bRm Mod R/M byte.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10567
10568
/** Opcode 0xda !11/0.  FIADD - add m32i to ST0. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    /* Common ST0-op-m32i worker with the fiadd assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
10575
10576
/** Opcode 0xda !11/1.  FIMUL - multiply ST0 by m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    /* Common ST0-op-m32i worker with the fimul assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
10583
10584
/** Opcode 0xda !11/2.  FICOM - compare ST0 with m32i, flags only. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Only FSW is updated; no register write.  The *_MEM_OP variants record
       the data pointer (FDP) alongside opcode/IP. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no destination register to flag for the underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10616
10617
/** Opcode 0xda !11/3.  FICOMP - compare ST0 with m32i, flags only, then pop. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Same as FICOM above, but using the *_THEN_POP FSW/underflow variants. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10649
10650
/** Opcode 0xda !11/4.  FISUB - subtract m32i from ST0. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    /* Common ST0-op-m32i worker with the fisub assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
10657
10658
/** Opcode 0xda !11/5.  FISUBR - reverse subtract: ST0 = m32i - ST0. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    /* Common ST0-op-m32i worker with the fisubr assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
10665
10666
/** Opcode 0xda !11/6.  FIDIV - divide ST0 by m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    /* Common ST0-op-m32i worker with the fidiv assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
10673
10674
/** Opcode 0xda !11/7.  FIDIVR - reverse divide: ST0 = m32i / ST0. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    /* Common ST0-op-m32i worker with the fidivr assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
10681
10682
/**
 * @opcode  0xda
 *
 * Escape-0xda dispatcher: FCMOVcc / FUCOMPP in register form, integer
 * m32i arithmetic in memory form.  Dispatches on the reg field of ModR/M.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (low 11 bits of opcode+modrm) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5:
                /* Only 0xe9 (FUCOMPP) is defined in this row. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10724
10725
/** Opcode 0xdb !11/0.  FILD - load m32i and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 is the slot the push will land in; it must be free,
       otherwise this is a stack (push) overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10756
10757
/** Opcode 0xdb !11/1.  FISTTP - store ST0 as m32i with truncation, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so #PF etc. is raised before
       any FPU state is modified. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit is conditional on the FSW the assembly worker produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: if the invalid-operation exception is masked (FCW.IM),
           store the integer indefinite value; then report underflow + pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10791
10792
/** Opcode 0xdb !11/2.  FIST - store ST0 as m32i (rounded per FCW.RC). */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Same structure as FISTTP above, but rounding worker and no pop. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10826
10827
/** Opcode 0xdb !11/3.  FISTP - store ST0 as m32i (rounded), then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Same as FIST above, but with the *_THEN_POP FSW/underflow variants. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10861
10862
/** Opcode 0xdb !11/5.  FLD - load m80r and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Push target slot (register 7) must be free, else push overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10893
10894
/** Opcode 0xdb !11/7.  FSTP - store ST0 to m80r, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* 10-byte destination needs the extended map variant; cbAlign of 7 here
       is presumably an alignment mask (8-byte) - confirm against the macro. */
    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: masked invalid-op stores negative QNaN, then underflow+pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10928
10929
/** Opcode 0xdb 11/0.  FCMOVNB - copy ST(i) to ST0 if CF is clear (not below). */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST0 must be occupied; the move itself depends on EFLAGS.CF. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10954
10955
/** Opcode 0xdb 11/1.  FCMOVNE - copy ST(i) to ST0 if ZF is clear (not equal). */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Same pattern as FCMOVNB, keyed on EFLAGS.ZF instead of CF. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10980
10981
/** Opcode 0xdb 11/2.  FCMOVNBE - copy ST(i) to ST0 if CF and ZF are both clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Same pattern as FCMOVNB, but the "not below or equal" condition
       requires both CF and ZF to be zero. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11006
11007
/** Opcode 0xdb 11/3.  FCMOVNU - copy ST(i) to ST0 if PF is clear (not unordered). */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Same pattern as FCMOVNB, keyed on EFLAGS.PF (unordered indicator). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11032
11033
/** Opcode 0xdb 0xe0.  FNENI - 8087 "enable interrupts"; ignored (NOP) here. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Still subject to CR0.EM/TS checks, but otherwise does nothing. */
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11044
11045
/** Opcode 0xdb 0xe1.  FNDISI - 8087 "disable interrupts"; ignored (NOP) here. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Still subject to CR0.EM/TS checks, but otherwise does nothing. */
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11056
11057
/** Opcode 0xdb 0xe2.  FNCLEX - clear FPU exception flags without #MF check. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    /* Note: no IEM_MC_MAYBE_RAISE_FPU_XCPT here - the "no-wait" form does
       not check for pending exceptions; it clears them. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11070
11071
/** Opcode 0xdb 0xe3.  FNINIT - reinitialize the FPU without checking exceptions. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Too much state to handle inline; defer to the C implementation.
       fCheckXcpts=false is what distinguishes FNINIT from FINIT. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, 0, iemCImpl_finit, false /*fCheckXcpts*/);
}
11079
11080
/** Opcode 0xdb 0xe4.  FNSETPM - 80287 "set protected mode"; ignored (NOP) here. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Still subject to CR0.EM/TS checks, but otherwise does nothing. */
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11091
11092
/** Opcode 0xdb 0xe5.  FRSTPM - 80287XL only; raises \#UD on the CPUs we emulate. */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    /* Modern CPUs treat this 80287XL-only encoding as an invalid opcode. */
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
11108
11109
/** Opcode 0xdb 11/5.  FUCOMI - unordered compare ST0 with ST(i), set EFLAGS. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    /* Shared CIMPL worker for FCOMI/FUCOMI; fUCmp=true selects the unordered
       (quiet-NaN tolerant) compare.  The last argument or's the pop flag
       (zero: no pop) into the FPU opcode word. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11118
11119
11120/** Opcode 0xdb 11/6. */
11121FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
11122{
11123 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
11124 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
11125 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
11126 false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
11127}
11128
11129
/**
 * @opcode  0xdb
 *
 * Escape-0xdb dispatcher: FCMOVcc (negated conditions), control ops
 * (FNCLEX/FNINIT/...) and FUCOMI/FCOMI in register form; m32i stores/loads
 * and m80r load/store in memory form.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (low 11 bits of opcode+modrm) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* reg=4 row holds the no-wait control instructions, selected
                   by the full modrm byte (0xe0..0xe7). */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
                    case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every inner case returns */
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11181
11182
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * Register-form counterpart of the memory workers: ST(i) is both the first
 * operand and the destination, ST0 is the second operand.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST0 must be occupied, else stack underflow on ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11213
11214
/** Opcode 0xdc 11/0.  FADD - ST(i) += ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    /* Common STn-op-ST0 worker with the fadd assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
11221
11222
/** Opcode 0xdc 11/1.  FMUL - ST(i) *= ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    /* Common STn-op-ST0 worker with the fmul assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
11229
11230
/** Opcode 0xdc 11/4.  FSUBR - ST(i) = ST0 - ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    /* Common STn-op-ST0 worker with the fsubr assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
11237
11238
/** Opcode 0xdc 11/5.  FSUB - ST(i) -= ST0. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    /* Common STn-op-ST0 worker with the fsub assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
11245
11246
/** Opcode 0xdc 11/6.  FDIVR - ST(i) = ST0 / ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    /* Common STn-op-ST0 worker with the fdivr assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
11253
11254
/** Opcode 0xdc 11/7.  FDIV - ST(i) /= ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    /* Common STn-op-ST0 worker with the fdiv assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
11261
11262
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* ST0 empty -> stack underflow (recorded with the memory operand). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11297
11298
/** Opcode 0xdc !11/0.  FADD - add m64r to ST0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    /* Common ST0-op-m64r worker with the fadd assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
11305
11306
/** Opcode 0xdc !11/1.  FMUL - multiply ST0 by m64r. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    /* Common ST0-op-m64r worker with the fmul assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
11313
11314
/** Opcode 0xdc !11/2.  FCOM - compare ST0 with m64r, flags only. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* FSW-only update; no register write. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11346
11347
/** Opcode 0xdc !11/3.  FCOMP - compare ST0 with m64r, flags only, then pop. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Same as FCOM above, but using the *_THEN_POP FSW/underflow variants. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11379
11380
/** Opcode 0xdc !11/4.  FSUB - subtract m64r from ST0. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    /* Common ST0-op-m64r worker with the fsub assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
11387
11388
/** Opcode 0xdc !11/5.  FSUBR - reverse subtract: ST0 = m64r - ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    /* Common ST0-op-m64r worker with the fsubr assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
11395
11396
/** Opcode 0xdc !11/6.  FDIV - divide ST0 by m64r. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    /* Common ST0-op-m64r worker with the fdiv assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
11403
11404
/** Opcode 0xdc !11/7.  FDIVR - reverse divide: ST0 = m64r / ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    /* Common ST0-op-m64r worker with the fdivr assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
11411
11412
/**
 * @opcode  0xdc
 *
 * Escape-0xdc dispatcher: arithmetic with ST(i) as destination in register
 * form, m64r arithmetic/compare in memory form.  Note the swapped sub/subr
 * and div/divr positions relative to 0xd8.
 */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (low 11 bits of opcode+modrm) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11451
11452
/** Opcode 0xdd !11/0.
 * FLD m64r - push a 64-bit real memory operand onto the FPU stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Only push if the incoming top-of-stack slot (register 7 relative to TOP) is free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack overflow: signal it instead of pushing. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11483
11484
/** Opcode 0xdd !11/1.
 * FISTTP m64i - store ST(0) to a 64-bit integer with truncation, then pop.
 * (Doc comment previously said !11/0; the iemOp_EscF5 dispatch uses /1.) */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: if IM is masked, store the integer indefinite value; otherwise leave memory untouched. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11518
11519
/** Opcode 0xdd !11/2.
 * FST m64r - store ST(0) to a 64-bit real memory operand (no pop).
 * (Doc comment previously said !11/0; the iemOp_EscF5 dispatch uses /2.) */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: if IM is masked, store the QNaN real indefinite; otherwise leave memory untouched. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11553
11554
11555
11556
/** Opcode 0xdd !11/3.
 * FSTP m64r - store ST(0) to a 64-bit real memory operand, then pop.
 * (Doc comment previously said !11/0; the iemOp_EscF5 dispatch uses /3.) */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: if IM is masked, store the QNaN real indefinite; otherwise leave memory untouched. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11590
11591
/** Opcode 0xdd !11/4.
 * FRSTOR - restore the full x87 state from a 94/108-byte memory image.
 * Defers to iemCImpl_frstor.
 * (Doc comment previously said !11/0; the iemOp_EscF5 dispatch uses /4.) */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    /* The memory image layout depends on the effective operand size (94 vs 108 bytes). */
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
11609
11610
/** Opcode 0xdd !11/6.
 * FNSAVE - save the full x87 state to a 94/108-byte memory image and
 * reinitialize the FPU.  Defers to iemCImpl_fnsave.
 * (Doc comment previously said !11/0; the iemOp_EscF5 dispatch uses /6.) */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
11628
/** Opcode 0xdd !11/7.
 * FNSTSW m16 - store the FPU status word to memory; no pending-exception check.
 * (Doc comment previously said !11/0; the iemOp_EscF5 dispatch uses /7.) */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2, 0, 0);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
11652
11653
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - mark the given stack register as empty. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
       unmodified. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11673
11674
/** Opcode 0xdd 11/2.
 * FST ST(i) - copy ST(0) into ST(i) without popping.
 * (Doc comment previously said 11/1; the iemOp_EscF5 dispatch uses /2.) */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11697
11698
/** Opcode 0xdd 11/4.
 * FUCOM ST(i) - unordered compare of ST(0) with ST(i).
 * (Doc comment previously said 11/3; the iemOp_EscF5 dispatch uses /4.) */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
11705
11706
/** Opcode 0xdd 11/5.
 * FUCOMP ST(i) - unordered compare of ST(0) with ST(i), then pop.
 * (Doc comment previously said 11/4; the iemOp_EscF5 dispatch uses /5.) */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
11713
11714
/**
 * @opcode 0xdd
 *
 * Escape opcode 0xdd dispatcher: register forms are stack-register ops
 * (FFREE, FST(P), FUCOM(P)); memory forms are 64-bit real loads/stores
 * plus FRSTOR/FNSAVE/FNSTSW.
 */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP): low 3 bits of the opcode byte combined with ModR/M. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
            case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11753
11754
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0) - ST(i) += ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
11761
11762
/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0) - ST(i) *= ST(0), then pop.
 * (Doc comment previously said 11/0; the iemOp_EscF6 dispatch uses /1.) */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
11769
11770
/** Opcode 0xde 0xd9.
 * FCOMPP - compare ST(0) with ST(1), then pop both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
11777
11778
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0) - ST(i) = ST(0) - ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
11785
11786
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0) - ST(i) = ST(i) - ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
11793
11794
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0) - ST(i) = ST(0) / ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
11801
11802
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0) - ST(i) = ST(i) / ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
11809
11810
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * Fetches the 16-bit signed integer operand, then invokes the assembly
 * helper with ST(0) if that register is not empty; otherwise signals a
 * stack underflow on ST(0).
 *
 * @param bRm Mod R/M byte.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11846
11847
/** Opcode 0xde !11/0.
 * FIADD m16i - ST(0) += 16-bit signed integer memory operand. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
11854
11855
/** Opcode 0xde !11/1.
 * FIMUL m16i - ST(0) *= 16-bit signed integer memory operand. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
11862
11863
/** Opcode 0xde !11/2.
 * FICOM m16i - compare ST(0) with a 16-bit signed integer memory operand,
 * updating only the FPU status word (no stack change). */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11895
11896
/** Opcode 0xde !11/3.
 * FICOMP m16i - compare ST(0) with a 16-bit signed integer memory operand,
 * then pop ST(0). */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11928
11929
/** Opcode 0xde !11/4.
 * FISUB m16i - ST(0) -= 16-bit signed integer memory operand. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
11936
11937
/** Opcode 0xde !11/5.
 * FISUBR m16i - ST(0) = 16-bit signed integer memory operand - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
11944
11945
/** Opcode 0xde !11/6.
 * FIDIV m16i - ST(0) /= 16-bit signed integer memory operand. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
11952
11953
/** Opcode 0xde !11/7.
 * FIDIVR m16i - ST(0) = 16-bit signed integer memory operand / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
11960
11961
/**
 * @opcode 0xde
 *
 * Escape opcode 0xde dispatcher: register forms are the pop variants of the
 * 0xdc arithmetic ops (plus FCOMPP at 0xd9); memory forms operate on ST(0)
 * and an m16 signed integer.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP): low 3 bits of the opcode byte combined with ModR/M. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    /* Only 0xd9 (FCOMPP) is defined in the /3 register range. */
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12002
12003
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp
 * (free ST(i), then pop by incrementing TOP). */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12023
12024
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - store the FPU status word into AX; no pending-exception check. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12039
12040
12041/** Opcode 0xdf 11/5. */
12042FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
12043{
12044 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
12045 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
12046 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
12047 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
12048}
12049
12050
/** Opcode 0xdf 11/6.
 * FCOMIP - ordered compare of ST(0) with ST(i) setting EFLAGS, then pop.
 * fUCmp is false: the ordered form raises \#IA on any NaN operand. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
12059
12060
/** Opcode 0xdf !11/0.
 * FILD m16i - push a 16-bit signed integer memory operand onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Only push if the incoming top-of-stack slot (register 7 relative to TOP) is free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12091
12092
/** Opcode 0xdf !11/1.
 * FISTTP m16i - store ST(0) to a 16-bit integer with truncation, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: if IM is masked, store the integer indefinite value; otherwise leave memory untouched. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12126
12127
/** Opcode 0xdf !11/2.
 * FIST m16i - store ST(0) to a 16-bit integer (rounded per FCW.RC, no pop). */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: if IM is masked, store the integer indefinite value; otherwise leave memory untouched. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12161
12162
/** Opcode 0xdf !11/3.
 * FISTP m16i - store ST(0) to a 16-bit integer (rounded per FCW.RC), then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: if IM is masked, store the integer indefinite value; otherwise leave memory untouched. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12196
12197
/** Opcode 0xdf !11/4.
 * FBLD m80d - push an 80-bit packed BCD memory operand onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Only push if the incoming top-of-stack slot (register 7 relative to TOP) is free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12228
12229
/** Opcode 0xdf !11/5.
 * FILD m64i - push a 64-bit signed integer memory operand onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Only push if the incoming top-of-stack slot (register 7 relative to TOP) is free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12260
12261
/** Opcode 0xdf !11/6.
 * FBSTP m80d - store ST(0) to an 80-bit packed BCD memory operand, then pop. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Note the explicit size/alignment mapping (10-byte operand, 7-byte alignment mask). */
    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: if IM is masked, store the packed BCD indefinite value; otherwise leave memory untouched. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12295
12296
/** Opcode 0xdf !11/7. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    /* FISTP m64i: store ST(0) to memory as a signed 64-bit integer, then pop. */
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the 8-byte destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        /* Commit is conditional on the FSW produced by the worker. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty (stack underflow): with the invalid-op exception masked,
           store the integer-indefinite value; either way flag underflow and pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12330
12331
12332/**
12333 * @opcode 0xdf
12334 */
12335FNIEMOP_DEF(iemOp_EscF7)
12336{
12337 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12338 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
12339 if (IEM_IS_MODRM_REG_MODE(bRm))
12340 {
12341 switch (IEM_GET_MODRM_REG_8(bRm))
12342 {
12343 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
12344 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
12345 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
12346 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
12347 case 4: if (bRm == 0xe0)
12348 return FNIEMOP_CALL(iemOp_fnstsw_ax);
12349 IEMOP_RAISE_INVALID_OPCODE_RET();
12350 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
12351 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
12352 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
12353 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12354 }
12355 }
12356 else
12357 {
12358 switch (IEM_GET_MODRM_REG_8(bRm))
12359 {
12360 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
12361 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
12362 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
12363 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
12364 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
12365 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
12366 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
12367 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
12368 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12369 }
12370 }
12371}
12372
12373
12374/**
12375 * @opcode 0xe0
12376 */
12377FNIEMOP_DEF(iemOp_loopne_Jb)
12378{
12379 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
12380 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12381 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12382
12383 switch (pVCpu->iem.s.enmEffAddrMode)
12384 {
12385 case IEMMODE_16BIT:
12386 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12388 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
12389 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12390 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12391 } IEM_MC_ELSE() {
12392 IEM_MC_ADVANCE_RIP_AND_FINISH();
12393 } IEM_MC_ENDIF();
12394 IEM_MC_END();
12395 break;
12396
12397 case IEMMODE_32BIT:
12398 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12400 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
12401 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12402 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12403 } IEM_MC_ELSE() {
12404 IEM_MC_ADVANCE_RIP_AND_FINISH();
12405 } IEM_MC_ENDIF();
12406 IEM_MC_END();
12407 break;
12408
12409 case IEMMODE_64BIT:
12410 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12412 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
12413 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12414 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12415 } IEM_MC_ELSE() {
12416 IEM_MC_ADVANCE_RIP_AND_FINISH();
12417 } IEM_MC_ENDIF();
12418 IEM_MC_END();
12419 break;
12420
12421 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12422 }
12423}
12424
12425
12426/**
12427 * @opcode 0xe1
12428 */
12429FNIEMOP_DEF(iemOp_loope_Jb)
12430{
12431 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
12432 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12433 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12434
12435 switch (pVCpu->iem.s.enmEffAddrMode)
12436 {
12437 case IEMMODE_16BIT:
12438 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12440 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
12441 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
12442 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12443 } IEM_MC_ELSE() {
12444 IEM_MC_ADVANCE_RIP_AND_FINISH();
12445 } IEM_MC_ENDIF();
12446 IEM_MC_END();
12447 break;
12448
12449 case IEMMODE_32BIT:
12450 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12451 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12452 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
12453 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
12454 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12455 } IEM_MC_ELSE() {
12456 IEM_MC_ADVANCE_RIP_AND_FINISH();
12457 } IEM_MC_ENDIF();
12458 IEM_MC_END();
12459 break;
12460
12461 case IEMMODE_64BIT:
12462 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12464 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
12465 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
12466 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12467 } IEM_MC_ELSE() {
12468 IEM_MC_ADVANCE_RIP_AND_FINISH();
12469 } IEM_MC_ENDIF();
12470 IEM_MC_END();
12471 break;
12472
12473 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12474 }
12475}
12476
12477
12478/**
12479 * @opcode 0xe2
12480 */
12481FNIEMOP_DEF(iemOp_loop_Jb)
12482{
12483 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
12484 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12485 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12486
12487 /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
12488 * using the 32-bit operand size override. How can that be restarted? See
12489 * weird pseudo code in intel manual. */
12490
12491 /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
12492 * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
12493 * the loop causes guest crashes, but when logging it's nice to skip a few million
12494 * lines of useless output. */
12495#if defined(LOG_ENABLED)
12496 if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
12497 switch (pVCpu->iem.s.enmEffAddrMode)
12498 {
12499 case IEMMODE_16BIT:
12500 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12502 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
12503 IEM_MC_ADVANCE_RIP_AND_FINISH();
12504 IEM_MC_END();
12505 break;
12506
12507 case IEMMODE_32BIT:
12508 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12510 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
12511 IEM_MC_ADVANCE_RIP_AND_FINISH();
12512 IEM_MC_END();
12513 break;
12514
12515 case IEMMODE_64BIT:
12516 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12517 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12518 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
12519 IEM_MC_ADVANCE_RIP_AND_FINISH();
12520 IEM_MC_END();
12521 break;
12522
12523 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12524 }
12525#endif
12526
12527 switch (pVCpu->iem.s.enmEffAddrMode)
12528 {
12529 case IEMMODE_16BIT:
12530 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12532 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
12533 IEM_MC_IF_CX_IS_NZ() {
12534 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12535 } IEM_MC_ELSE() {
12536 IEM_MC_ADVANCE_RIP_AND_FINISH();
12537 } IEM_MC_ENDIF();
12538 IEM_MC_END();
12539 break;
12540
12541 case IEMMODE_32BIT:
12542 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12544 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
12545 IEM_MC_IF_ECX_IS_NZ() {
12546 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12547 } IEM_MC_ELSE() {
12548 IEM_MC_ADVANCE_RIP_AND_FINISH();
12549 } IEM_MC_ENDIF();
12550 IEM_MC_END();
12551 break;
12552
12553 case IEMMODE_64BIT:
12554 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12556 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
12557 IEM_MC_IF_RCX_IS_NZ() {
12558 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12559 } IEM_MC_ELSE() {
12560 IEM_MC_ADVANCE_RIP_AND_FINISH();
12561 } IEM_MC_ENDIF();
12562 IEM_MC_END();
12563 break;
12564
12565 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12566 }
12567}
12568
12569
12570/**
12571 * @opcode 0xe3
12572 */
12573FNIEMOP_DEF(iemOp_jecxz_Jb)
12574{
12575 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
12576 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12577 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12578
12579 switch (pVCpu->iem.s.enmEffAddrMode)
12580 {
12581 case IEMMODE_16BIT:
12582 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12584 IEM_MC_IF_CX_IS_NZ() {
12585 IEM_MC_ADVANCE_RIP_AND_FINISH();
12586 } IEM_MC_ELSE() {
12587 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12588 } IEM_MC_ENDIF();
12589 IEM_MC_END();
12590 break;
12591
12592 case IEMMODE_32BIT:
12593 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12595 IEM_MC_IF_ECX_IS_NZ() {
12596 IEM_MC_ADVANCE_RIP_AND_FINISH();
12597 } IEM_MC_ELSE() {
12598 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12599 } IEM_MC_ENDIF();
12600 IEM_MC_END();
12601 break;
12602
12603 case IEMMODE_64BIT:
12604 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12605 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12606 IEM_MC_IF_RCX_IS_NZ() {
12607 IEM_MC_ADVANCE_RIP_AND_FINISH();
12608 } IEM_MC_ELSE() {
12609 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12610 } IEM_MC_ENDIF();
12611 IEM_MC_END();
12612 break;
12613
12614 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12615 }
12616}
12617
12618
/** Opcode 0xe4
 * IN AL,Ib: byte port read from the immediate port; deferred to the C
 * implementation.  The flush mask covers xAX since the result lands in AL. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12628
12629
/** Opcode 0xe5
 * IN eAX,Ib: word/dword port read (by effective operand size) from the
 * immediate port.  The flush mask covers xAX since the result lands there. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12640
12641
/** Opcode 0xe6
 * OUT Ib,AL: byte port write to the immediate port; no guest registers are
 * modified, hence the zero flush mask. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12651
12652
/** Opcode 0xe7
 * OUT Ib,eAX: word/dword port write (by effective operand size) to the
 * immediate port; no guest registers are modified (zero flush mask). */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12663
12664
12665/**
12666 * @opcode 0xe8
12667 */
12668FNIEMOP_DEF(iemOp_call_Jv)
12669{
12670 IEMOP_MNEMONIC(call_Jv, "call Jv");
12671 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12672 switch (pVCpu->iem.s.enmEffOpSize)
12673 {
12674 case IEMMODE_16BIT:
12675 {
12676 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12677 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
12678 iemCImpl_call_rel_16, (int16_t)u16Imm);
12679 }
12680
12681 case IEMMODE_32BIT:
12682 {
12683 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12684 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
12685 iemCImpl_call_rel_32, (int32_t)u32Imm);
12686 }
12687
12688 case IEMMODE_64BIT:
12689 {
12690 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
12691 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
12692 iemCImpl_call_rel_64, u64Imm);
12693 }
12694
12695 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12696 }
12697}
12698
12699
12700/**
12701 * @opcode 0xe9
12702 */
12703FNIEMOP_DEF(iemOp_jmp_Jv)
12704{
12705 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
12706 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12707 switch (pVCpu->iem.s.enmEffOpSize)
12708 {
12709 case IEMMODE_16BIT:
12710 IEM_MC_BEGIN(0, 0, 0, 0);
12711 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
12712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12713 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
12714 IEM_MC_END();
12715 break;
12716
12717 case IEMMODE_64BIT:
12718 case IEMMODE_32BIT:
12719 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12720 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
12721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12722 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
12723 IEM_MC_END();
12724 break;
12725
12726 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12727 }
12728}
12729
12730
12731/**
12732 * @opcode 0xea
12733 */
12734FNIEMOP_DEF(iemOp_jmp_Ap)
12735{
12736 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
12737 IEMOP_HLP_NO_64BIT();
12738
12739 /* Decode the far pointer address and pass it on to the far call C implementation. */
12740 uint32_t off32Seg;
12741 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
12742 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
12743 else
12744 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
12745 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
12746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12747 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
12748 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
12749 iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
12750 /** @todo make task-switches, ring-switches, ++ return non-zero status */
12751}
12752
12753
12754/**
12755 * @opcode 0xeb
12756 */
12757FNIEMOP_DEF(iemOp_jmp_Jb)
12758{
12759 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
12760 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12761 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12762
12763 IEM_MC_BEGIN(0, 0, 0, 0);
12764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12765 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12766 IEM_MC_END();
12767}
12768
12769
/** Opcode 0xec
 * IN AL,DX: byte port read from the port in DX.  The flush mask covers xAX
 * since the result lands in AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12779
12780
/** Opcode 0xed
 * IN eAX,DX: word/dword port read (by effective operand size) from the port
 * in DX.  The flush mask covers xAX since the result lands there. */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12791
12792
/** Opcode 0xee
 * OUT DX,AL: byte port write to the port in DX; no guest registers are
 * modified (zero flush mask). */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12801
12802
/** Opcode 0xef
 * OUT DX,eAX: word/dword port write (by effective operand size) to the port
 * in DX; no guest registers are modified (zero flush mask). */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12812
12813
12814/**
12815 * @opcode 0xf0
12816 */
12817FNIEMOP_DEF(iemOp_lock)
12818{
12819 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
12820 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12821 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
12822
12823 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12824 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12825}
12826
12827
12828/**
12829 * @opcode 0xf1
12830 */
12831FNIEMOP_DEF(iemOp_int1)
12832{
12833 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
12834 /** @todo Does not generate \#UD on 286, or so they say... Was allegedly a
12835 * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
12836 * LOADALL memo. Needs some testing. */
12837 IEMOP_HLP_MIN_386();
12838 /** @todo testcase! */
12839 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
12840 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
12841 iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
12842}
12843
12844
12845/**
12846 * @opcode 0xf2
12847 */
12848FNIEMOP_DEF(iemOp_repne)
12849{
12850 /* This overrides any previous REPE prefix. */
12851 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
12852 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
12853 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
12854
12855 /* For the 4 entry opcode tables, REPNZ overrides any previous
12856 REPZ and operand size prefixes. */
12857 pVCpu->iem.s.idxPrefix = 3;
12858
12859 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12860 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12861}
12862
12863
12864/**
12865 * @opcode 0xf3
12866 */
12867FNIEMOP_DEF(iemOp_repe)
12868{
12869 /* This overrides any previous REPNE prefix. */
12870 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
12871 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
12872 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
12873
12874 /* For the 4 entry opcode tables, REPNZ overrides any previous
12875 REPNZ and operand size prefixes. */
12876 pVCpu->iem.s.idxPrefix = 2;
12877
12878 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12879 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12880}
12881
12882
12883/**
12884 * @opcode 0xf4
12885 */
12886FNIEMOP_DEF(iemOp_hlt)
12887{
12888 IEMOP_MNEMONIC(hlt, "hlt");
12889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12890 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_hlt);
12891}
12892
12893
12894/**
12895 * @opcode 0xf5
12896 */
12897FNIEMOP_DEF(iemOp_cmc)
12898{
12899 IEMOP_MNEMONIC(cmc, "cmc");
12900 IEM_MC_BEGIN(0, 0, 0, 0);
12901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12902 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
12903 IEM_MC_ADVANCE_RIP_AND_FINISH();
12904 IEM_MC_END();
12905}
12906
12907
12908/**
12909 * Body for of 'inc/dec/not/neg Eb'.
12910 */
12911#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
12912 if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
12913 { \
12914 /* register access */ \
12915 IEM_MC_BEGIN(2, 0, 0, 0); \
12916 IEMOP_HLP_DONE_DECODING(); \
12917 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12918 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
12919 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
12920 IEM_MC_REF_EFLAGS(pEFlags); \
12921 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
12922 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12923 IEM_MC_END(); \
12924 } \
12925 else \
12926 { \
12927 /* memory access. */ \
12928 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
12929 { \
12930 IEM_MC_BEGIN(2, 2, 0, 0); \
12931 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12932 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
12933 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12934 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12935 \
12936 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
12937 IEMOP_HLP_DONE_DECODING(); \
12938 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12939 IEM_MC_FETCH_EFLAGS(EFlags); \
12940 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
12941 \
12942 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
12943 IEM_MC_COMMIT_EFLAGS(EFlags); \
12944 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12945 IEM_MC_END(); \
12946 } \
12947 else \
12948 { \
12949 IEM_MC_BEGIN(2, 2, 0, 0); \
12950 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12951 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
12952 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12953 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12954 \
12955 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
12956 IEMOP_HLP_DONE_DECODING(); \
12957 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12958 IEM_MC_FETCH_EFLAGS(EFlags); \
12959 IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
12960 \
12961 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
12962 IEM_MC_COMMIT_EFLAGS(EFlags); \
12963 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12964 IEM_MC_END(); \
12965 } \
12966 } \
12967 (void)0
12968
12969
12970/**
12971 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
12972 */
12973#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
12974 if (IEM_IS_MODRM_REG_MODE(bRm)) \
12975 { \
12976 /* \
12977 * Register target \
12978 */ \
12979 switch (pVCpu->iem.s.enmEffOpSize) \
12980 { \
12981 case IEMMODE_16BIT: \
12982 IEM_MC_BEGIN(2, 0, 0, 0); \
12983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12984 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
12985 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
12986 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
12987 IEM_MC_REF_EFLAGS(pEFlags); \
12988 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
12989 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12990 IEM_MC_END(); \
12991 break; \
12992 \
12993 case IEMMODE_32BIT: \
12994 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0); \
12995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12996 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
12997 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
12998 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
12999 IEM_MC_REF_EFLAGS(pEFlags); \
13000 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
13001 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
13002 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13003 IEM_MC_END(); \
13004 break; \
13005 \
13006 case IEMMODE_64BIT: \
13007 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0); \
13008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13009 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
13010 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13011 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13012 IEM_MC_REF_EFLAGS(pEFlags); \
13013 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
13014 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13015 IEM_MC_END(); \
13016 break; \
13017 \
13018 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13019 } \
13020 } \
13021 else \
13022 { \
13023 /* \
13024 * Memory target. \
13025 */ \
13026 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
13027 { \
13028 switch (pVCpu->iem.s.enmEffOpSize) \
13029 { \
13030 case IEMMODE_16BIT: \
13031 IEM_MC_BEGIN(2, 3, 0, 0); \
13032 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
13033 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13034 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13035 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13036 \
13037 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13038 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13039 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13040 IEM_MC_FETCH_EFLAGS(EFlags); \
13041 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
13042 \
13043 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
13044 IEM_MC_COMMIT_EFLAGS(EFlags); \
13045 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13046 IEM_MC_END(); \
13047 break; \
13048 \
13049 case IEMMODE_32BIT: \
13050 IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
13051 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
13052 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13053 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13054 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13055 \
13056 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13058 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13059 IEM_MC_FETCH_EFLAGS(EFlags); \
13060 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
13061 \
13062 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
13063 IEM_MC_COMMIT_EFLAGS(EFlags); \
13064 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13065 IEM_MC_END(); \
13066 break; \
13067 \
13068 case IEMMODE_64BIT: \
13069 IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
13070 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
13071 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13072 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13073 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13074 \
13075 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13077 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13078 IEM_MC_FETCH_EFLAGS(EFlags); \
13079 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
13080 \
13081 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
13082 IEM_MC_COMMIT_EFLAGS(EFlags); \
13083 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13084 IEM_MC_END(); \
13085 break; \
13086 \
13087 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13088 } \
13089 } \
13090 else \
13091 { \
13092 (void)0
13093
/**
 * Locked memory-target tail for IEMOP_BODY_UNARY_Ev.
 *
 * Must directly follow IEMOP_BODY_UNARY_Ev in the expansion: it provides the
 * body of the open 'else' (LOCK prefix) branch left by that macro and closes
 * both of its open scopes.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
13159
13160
13161/**
13162 * @opmaps grp3_f6
13163 * @opcode /0
13164 * @todo also /1
13165 */
13166FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
13167{
13168 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
13169 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
13170
13171 if (IEM_IS_MODRM_REG_MODE(bRm))
13172 {
13173 /* register access */
13174 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13175 IEM_MC_BEGIN(3, 0, 0, 0);
13176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13177 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13178 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
13179 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13180 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
13181 IEM_MC_REF_EFLAGS(pEFlags);
13182 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
13183 IEM_MC_ADVANCE_RIP_AND_FINISH();
13184 IEM_MC_END();
13185 }
13186 else
13187 {
13188 /* memory access. */
13189 IEM_MC_BEGIN(3, 3, 0, 0);
13190 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13191 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13192
13193 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13195
13196 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
13197 IEM_MC_ARG(uint8_t const *, pu8Dst, 0);
13198 IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13199
13200 IEM_MC_ARG_CONST(uint8_t, u8Src, u8Imm, 1);
13201 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
13202 IEM_MC_FETCH_EFLAGS(EFlags);
13203 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
13204
13205 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo);
13206 IEM_MC_COMMIT_EFLAGS(EFlags);
13207 IEM_MC_ADVANCE_RIP_AND_FINISH();
13208 IEM_MC_END();
13209 }
13210}
13211
13212
/** Opcode 0xf6 /4, /5, /6 and /7.
 *
 * Common worker for the byte-sized MUL/IMUL/DIV/IDIV forms: AX is the
 * implicit accumulator/result, @a pfnU8 is the assembly worker.  A non-zero
 * return from the worker means a divide error (\#DE) must be raised.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEM_MC_BEGIN(3, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc == 0 means success; anything else raises #DE. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2, 0, 0);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc == 0 means success; anything else raises #DE. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
13263
13264
13265/** Opcode 0xf7 /4, /5, /6 and /7. */
/** Opcode 0xf7 /4, /5, /6 and /7.
 *
 * Common worker for the word/dword/qword group-3 multiply/divide encodings
 * (MUL/IMUL/DIV/IDIV r/m16, r/m32, r/m64).  The assembly worker operates on
 * the xAX:xDX register pair and the source operand, returning non-zero to
 * signal a divide error.
 *
 * @param   bRm     The ModR/M byte (already fetched).
 * @param   pImpl   Table with the 16/32/64-bit assembly implementations.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc); /* zero on success, non-zero -> \#DE */

                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc); /* zero on success, non-zero -> \#DE */

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit writes to EAX/EDX zero the high halves of RAX/RDX. */
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc); /* zero on success, non-zero -> \#DE */

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2, 0, 0);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc); /* zero on success, non-zero -> \#DE */

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc); /* zero on success, non-zero -> \#DE */

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit writes to EAX/EDX zero the high halves of RAX/RDX. */
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc); /* zero on success, non-zero -> \#DE */

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13435
13436
13437/**
13438 * @opmaps grp3_f6
13439 * @opcode /2
13440 */
/* NOT r/m8 - one's complement negation; shared unary body handles the
   register/memory/locked variants. */
FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Eb, "not Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
}
13446
13447
13448/**
13449 * @opmaps grp3_f6
13450 * @opcode /3
13451 */
13452FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
13453{
13454 IEMOP_MNEMONIC(net_Eb, "neg Eb");
13455 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
13456}
13457
13458
13459/**
13460 * @opcode 0xf6
13461 */
/**
 * @opcode 0xf6
 *
 * Group 3 dispatcher for byte operands: TEST/not-used-alias/NOT/NEG and the
 * multiply/divide family, selected by the ModR/M reg field.
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm); /* /1 decodes as TEST too (undocumented alias). */
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13490
13491
13492/** Opcode 0xf7 /0. */
/** Opcode 0xf7 /0.
 *
 * TEST Ev,Iv - AND the operand with an immediate, update flags, discard the
 * result.  The destination is therefore only mapped read-only and no value
 * is written back.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                /* Immediate is a sign-extended 32-bit value, as usual for 64-bit ops. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = immediate bytes following ModR/M. */

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint16_t const *, pu16Dst, 0);
                IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = immediate bytes following ModR/M. */

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint32_t const *, pu32Dst, 0);
                IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = immediate bytes (imm32) following ModR/M. */

                /* Immediate is a sign-extended 32-bit value, as usual for 64-bit ops. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_ARG(uint64_t const *, pu64Dst, 0);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13627
13628
13629/** Opcode 0xf7 /2. */
/** Opcode 0xf7 /2.
 *
 * NOT Ev - one's complement negation; the shared unary bodies handle the
 * register/memory variants and the LOCK-prefixed memory forms.
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_not_u16, iemAImpl_not_u32, iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
13636
13637
13638/** Opcode 0xf7 /3. */
/** Opcode 0xf7 /3.
 *
 * NEG Ev - two's complement negation; the shared unary bodies handle the
 * register/memory variants and the LOCK-prefixed memory forms.
 */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_neg_u16, iemAImpl_neg_u32, iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
13645
13646
13647/**
13648 * @opcode 0xf7
13649 */
/**
 * @opcode 0xf7
 *
 * Group 3 dispatcher for word/dword/qword operands: TEST/alias/NOT/NEG and
 * the multiply/divide family, selected by the ModR/M reg field.
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm); /* /1 decodes as TEST too (undocumented alias). */
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13678
13679
13680/**
13681 * @opcode 0xf8
13682 */
/**
 * @opcode 0xf8
 *
 * CLC - clear the carry flag; no other state is touched.
 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13692
13693
13694/**
13695 * @opcode 0xf9
13696 */
/**
 * @opcode 0xf9
 *
 * STC - set the carry flag; no other state is touched.
 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13706
13707
13708/**
13709 * @opcode 0xfa
13710 */
/**
 * @opcode 0xfa
 *
 * CLI - privileged/IOPL-sensitive, so it is deferred to a C implementation
 * which may also trigger a VM-exit; IRQs are checked before executing it.
 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, 0, iemCImpl_cli);
}
13717
13718
/**
 * @opcode 0xfb
 *
 * STI - deferred to a C implementation; sets up the one-instruction
 * interrupt inhibition shadow and rechecks IRQs after executing.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET( IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
                                | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, 0, iemCImpl_sti);
}
13726
13727
13728/**
13729 * @opcode 0xfc
13730 */
/**
 * @opcode 0xfc
 *
 * CLD - clear the direction flag; no other state is touched.
 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13740
13741
13742/**
13743 * @opcode 0xfd
13744 */
/**
 * @opcode 0xfd
 *
 * STD - set the direction flag; no other state is touched.
 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13754
13755
13756/**
13757 * @opmaps grp4
13758 * @opcode /0
13759 */
/**
 * @opmaps grp4
 * @opcode /0
 *
 * INC r/m8; shared unary body handles the register/memory/locked variants.
 */
FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Eb, "inc Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
}
13765
13766
13767/**
13768 * @opmaps grp4
13769 * @opcode /1
13770 */
/**
 * @opmaps grp4
 * @opcode /1
 *
 * DEC r/m8; shared unary body handles the register/memory/locked variants.
 */
FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Eb, "dec Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
}
13776
13777
13778/**
13779 * @opcode 0xfe
13780 */
/**
 * @opcode 0xfe
 *
 * Group 4 dispatcher: only INC/DEC r/m8 are defined; /2../7 raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
        default:
            /** @todo is the eff-addr decoded? */
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}
13794
13795/** Opcode 0xff /0. */
/** Opcode 0xff /0.
 *
 * INC Ev; the shared unary bodies handle the register/memory variants and
 * the LOCK-prefixed memory forms.
 */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_inc_u16, iemAImpl_inc_u32, iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
13802
13803
13804/** Opcode 0xff /1. */
/** Opcode 0xff /1.
 *
 * DEC Ev; the shared unary bodies handle the register/memory variants and
 * the LOCK-prefixed memory forms.
 */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_dec_u16, iemAImpl_dec_u32, iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
13811
13812
13813/**
13814 * Opcode 0xff /2.
13815 * @param bRm The RM byte.
13816 */
/**
 * Opcode 0xff /2.
 *
 * CALL Ev - near indirect call; the target RIP comes from a register or a
 * memory operand and the actual push/branch is done by the C implementation.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1, 0, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13899
/** Common body for CALLF/JMPF Ep (0xff /3 and /5).
 *
 * Loads a far pointer (offset + 16-bit selector) from memory and defers the
 * actual far branch to @a a_fnCImpl.  Register operands are invalid (\#UD).
 * The selector word lives after the offset, hence the _DISP fetch of 2/4/8.
 *
 * @param a_bRm         The ModR/M byte.
 * @param a_fnCImpl     The C implementation performing the far branch.
 * @param a_fCImplExtra Extra IEM_CIMPL_F_XXX flags (e.g. BRANCH_STACK for calls).
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl, a_fCImplExtra) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1, 0, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint16_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(X86_GREG_xSP); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
    \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint32_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(X86_GREG_xSP); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
    \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint64_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(X86_GREG_xSP); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                | IEM_CIMPL_F_MODE /* no gates */, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
    \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
13971
13972
13973/**
13974 * Opcode 0xff /3.
13975 * @param bRm The RM byte.
13976 */
/**
 * Opcode 0xff /3.
 *
 * CALLF Ep - far indirect call via a memory far pointer; shares its body
 * with JMPF (/5), adding the stack-branch flag for the call semantics.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf, IEM_CIMPL_F_BRANCH_STACK);
}
13982
13983
13984/**
13985 * Opcode 0xff /4.
13986 * @param bRm The RM byte.
13987 */
/**
 * Opcode 0xff /4.
 *
 * JMP Ev - near indirect jump; the target RIP comes from a register or a
 * memory operand and is installed directly via the SET_RIP microcode ops.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14070
14071
14072/**
14073 * Opcode 0xff /5.
14074 * @param bRm The RM byte.
14075 */
/**
 * Opcode 0xff /5.
 *
 * JMPF Ep - far indirect jump via a memory far pointer; shares its body
 * with CALLF (/3) but without the stack-branch flag.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp, 0);
}
14081
14082
14083/**
14084 * Opcode 0xff /6.
14085 * @param bRm The RM byte.
14086 */
/**
 * Opcode 0xff /6.
 *
 * PUSH Ev - register forms go through the common push-GReg worker; memory
 * forms fetch the operand here and push it.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            /* 32-bit pushes are not encodable in 64-bit mode, hence NOT_64BIT. */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14138
14139
14140/**
14141 * @opcode 0xff
14142 */
/**
 * @opcode 0xff
 *
 * Group 5 dispatcher: INC/DEC/CALL/CALLF/JMP/JMPF/PUSH by ModR/M reg field;
 * /7 is undefined and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
        case 1:
            return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    AssertFailedReturn(VERR_IEM_IPE_3); /* All reg values handled above; placates the compiler. */
}
14168
14169
14170
/**
 * The one byte opcode dispatch table (declared extern at the top of this
 * file so it can be forward referenced).
 *
 * Indexed directly by the opcode byte; each row of four entries starts at
 * the opcode given in the leading comment.  Prefix bytes (segment
 * overrides, operand/address size, lock, rep/repne), the 0x0f two-byte
 * escape and the 0xd8..0xdf FPU escapes dispatch to handlers of their own,
 * just like ordinary instructions.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_in_eAX_DX,        iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
14238
14239
14240/** @} */
14241
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette