VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 102012

Last change on this file since 102012 was 102011, checked in by vboxsync, 17 months ago

VMM/IEM: Added a flush mask for guest register shadows to the IEM_MC_CALL_CIMPL_X macros to better manage register optimizations when recompiling to native code, replacing the IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG/SREG macros added earlier today. Added a IEM_MC_HINT_FLUSH_GUEST_SHADOW macro for debugging purposes. bugref:10371

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 530.7 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 102011 2023-11-08 22:10:48Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
/**
 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
 * memory/register as the destination (Eb,Gb encoding), read-write access.
 *
 * Note! This macro is deliberately brace-unbalanced: it returns control to
 *       the caller inside an open 'else {' branch (the LOCK-prefix case) and
 *       MUST be followed by IEMOP_BODY_BINARY_rm_r8_NO_LOCK or
 *       IEMOP_BODY_BINARY_rm_r8_LOCKED, which close the braces.
 *
 * @param   a_fnNormalU8    The non-atomic 8-bit worker (e.g. iemAImpl_add_u8).
 */
#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        /* Source is the 'reg' field, destination the 'rm' field. */ \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
            IEM_MC_ARG(uint8_t,    u8Src,   1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            /* Map the destination byte read-write before fetching the source. */ \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
118
/**
 * Body for instructions like TEST &amp; CMP, ++ with a byte memory/registers as
 * operands; the memory operand is only read (mapped read-only).
 *
 * Note! Like IEMOP_BODY_BINARY_rm_r8_RW this is deliberately brace-unbalanced
 *       and MUST be followed by IEMOP_BODY_BINARY_rm_r8_NO_LOCK or
 *       IEMOP_BODY_BINARY_rm_r8_LOCKED to close the open 'else {' branch.
 *
 * @param   a_fnNormalU8    The non-atomic 8-bit worker (e.g. iemAImpl_cmp_u8).
 */
#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
            IEM_MC_ARG(uint8_t,         u8Src,  1); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            /* Read-only mapping: the worker never writes *pu8Dst (TEST/CMP). */ \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
178
/**
 * Tail for IEMOP_BODY_BINARY_rm_r8_RW/RO when the instruction does not allow
 * a LOCK prefix: raises \#UD and closes the two braces the body macro left
 * open.
 */
#define IEMOP_BODY_BINARY_rm_r8_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
185
/**
 * Tail for IEMOP_BODY_BINARY_rm_r8_RW when the instruction allows a LOCK
 * prefix: emits the atomic memory variant and closes the two braces the body
 * macro left open.
 *
 * @param   a_fnLockedU8    The atomic 8-bit worker (e.g. iemAImpl_add_u8_locked).
 */
#define IEMOP_BODY_BINARY_rm_r8_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
            IEM_MC_ARG(uint8_t,    u8Src,   1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bMapInfoDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bMapInfoDst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bMapInfoDst); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
208
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination (Gb,Eb encoding).
 *
 * Self-contained (braces balanced): since the destination is always a
 * register, no LOCK prefix is possible and no lock tail macro is needed.
 *
 * @param   a_fnNormalU8    The 8-bit worker (e.g. iemAImpl_add_u8).
 */
#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        /* Note the reversed operand roles vs the Eb,Gb form: source is 'rm', \
           destination is 'reg'. */ \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        IEM_MC_BEGIN(3, 1, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
257
258
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination (Ev,Gv encoding), read-write access.
 *
 * Switches on the effective operand size (16/32/64-bit).
 *
 * Note! Deliberately brace-unbalanced: ends inside an open 'else {' branch
 *       (the LOCK-prefix case) and MUST be followed by
 *       IEMOP_BODY_BINARY_rm_rv_LOCKED, which closes the braces.
 *
 * @param   a_fnNormalU16   The non-atomic 16-bit worker.
 * @param   a_fnNormalU32   The non-atomic 32-bit worker.
 * @param   a_fnNormalU64   The non-atomic 64-bit worker.
 */
#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                /* 32-bit register writes zero bits 63:32 of the full GPR. */ \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t,   u16Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t,   u32Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t,   u64Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * Tail for IEMOP_BODY_BINARY_rm_rv_RW: emits the atomic (LOCK-prefixed)
 * memory variants and closes the two braces the body macro left open.
 *
 * @param   a_fnLockedU16   The atomic 16-bit worker.
 * @param   a_fnLockedU32   The atomic 32-bit worker.
 * @param   a_fnLockedU64   The atomic 64-bit worker.
 */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t,   u16Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t,   u32Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t,   u64Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
477
/**
 * Body for read-only word/dword/qword instructions like TEST and CMP with
 * memory/register as the destination; the memory operand is mapped read-only
 * and a LOCK prefix always raises \#UD.
 *
 * Unlike the _RW variant this macro is self-contained (braces balanced) -
 * no tail macro is required.
 *
 * @param   a_fnNormalU16   The 16-bit worker.
 * @param   a_fnNormalU32   The 32-bit worker.
 * @param   a_fnNormalU64   The 64-bit worker.
 */
#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                /* No IEM_MC_CLEAR_HIGH_GREG_U64 here: the destination register \
                   is not written (TEST/CMP only update EFLAGS). */ \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t,         u16Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t,         u32Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t,         u64Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* Read-only instructions never allow a LOCK prefix. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
627
628
/**
 * Body for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate (AL,Ib encoding).
 *
 * @param   a_fnNormalU8    The 8-bit worker (e.g. iemAImpl_add_u8).
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    \
    IEM_MC_BEGIN(3, 0, 0, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1); \
    IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
    \
    /* Destination is fixed: AL (low byte of xAX). */ \
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
    \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
648
/**
 * Body for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate (rAX,Iz encoding).
 *
 * In 64-bit mode the immediate is a sign-extended 32-bit value
 * (IEM_OPCODE_GET_NEXT_S32_SX_U64), matching the x86 Iz convention.
 *
 * @param   a_fnNormalU16       The 16-bit worker.
 * @param   a_fnNormalU32       The 32-bit worker.
 * @param   a_fnNormalU64       The 64-bit worker.
 * @param   a_fModifiesDstReg   Non-zero if the worker writes rAX; controls
 *                              whether the upper half of RAX is cleared after
 *                              a 32-bit operation (not needed for CMP/TEST).
 */
#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            \
            IEM_MC_BEGIN(3, 0, 0, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
            \
            /* Only clear RAX[63:32] when the destination was actually written. */ \
            if (a_fModifiesDstReg) \
                IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
715
716
717
718/* Instruction specification format - work in progress: */
719
720/**
721 * @opcode 0x00
722 * @opmnemonic add
723 * @op1 rm:Eb
724 * @op2 reg:Gb
725 * @opmaps one
726 * @openc ModR/M
727 * @opflmodify cf,pf,af,zf,sf,of
728 * @ophints harmless ignores_op_sizes
729 * @opstats add_Eb_Gb
730 * @opgroup og_gen_arith_bin
731 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
732 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
733 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
734 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
735 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* The _RW body opens the LOCK-prefix branch that _LOCKED completes with
       the atomic worker; the two macros form one statement sequence. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_add_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_add_u8_locked);
}
742
743
744/**
745 * @opcode 0x01
746 * @opgroup og_gen_arith_bin
747 * @opflmodify cf,pf,af,zf,sf,of
748 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
749 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
750 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
751 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
752 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* _RW + _LOCKED pair up into one statement sequence (see macro docs). */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
759
760
761/**
762 * @opcode 0x02
763 * @opgroup og_gen_arith_bin
764 * @opflmodify cf,pf,af,zf,sf,of
765 * @opcopytests iemOp_add_Eb_Gb
766 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register destination: no LOCK form, so the self-contained body suffices. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
}
772
773
774/**
775 * @opcode 0x03
776 * @opgroup og_gen_arith_bin
777 * @opflmodify cf,pf,af,zf,sf,of
778 * @opcopytests iemOp_add_Ev_Gv
779 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* NOTE(review): IEMOP_BODY_BINARY_rv_rm is defined elsewhere in this file;
       trailing args appear to be modifies-dst / sign-extend flags - confirm. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1, 0);
}
785
786
787/**
788 * @opcode 0x04
789 * @opgroup og_gen_arith_bin
790 * @opflmodify cf,pf,af,zf,sf,of
791 * @opcopytests iemOp_add_Eb_Gb
792 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Fixed AL destination with byte immediate. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
}
798
799
800/**
801 * @opcode 0x05
802 * @opgroup og_gen_arith_bin
803 * @opflmodify cf,pf,af,zf,sf,of
804 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
805 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
806 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
807 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
808 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* Last arg = 1: ADD writes rAX, so the 32-bit case clears RAX[63:32]. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
814
815
816/**
817 * @opcode 0x06
818 * @opgroup og_stack_sreg
819 */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    /* PUSH ES is invalid in 64-bit mode. */
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
826
827
828/**
829 * @opcode 0x07
830 * @opgroup og_stack_sreg
831 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    /* POP ES is invalid in 64-bit mode. */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Defers to iemCImpl_pop_Sreg; the mask tells the native recompiler which
       guest register shadows the C implementation clobbers: xSP (stack pop)
       plus the ES selector/base/limit. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                  RT_BIT_64(kIemNativeGstReg_GprFirst      + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst   + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst  + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES),
                                iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
844
845
846/**
847 * @opcode 0x08
848 * @opgroup og_gen_arith_bin
849 * @opflmodify cf,pf,af,zf,sf,of
850 * @opflundef af
851 * @opflclear of,cf
852 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
853 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
854 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
855 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
856 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* AF is architecturally undefined after OR; tell the verifier so. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_or_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_or_u8_locked);
}
864
865
/**
867 * @opcode 0x09
868 * @opgroup og_gen_arith_bin
869 * @opflmodify cf,pf,af,zf,sf,of
870 * @opflundef af
871 * @opflclear of,cf
872 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
873 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
874 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
875 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
876 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
877 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
878 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* AF is architecturally undefined after OR; tell the verifier so. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
886
887
888/**
889 * @opcode 0x0a
890 * @opgroup og_gen_arith_bin
891 * @opflmodify cf,pf,af,zf,sf,of
892 * @opflundef af
893 * @opflclear of,cf
894 * @opcopytests iemOp_or_Eb_Gb
895 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* AF is architecturally undefined after OR; tell the verifier so. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
}
902
903
904/**
905 * @opcode 0x0b
906 * @opgroup og_gen_arith_bin
907 * @opflmodify cf,pf,af,zf,sf,of
908 * @opflundef af
909 * @opflclear of,cf
910 * @opcopytests iemOp_or_Ev_Gv
911 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* AF is architecturally undefined after OR; tell the verifier so. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* NOTE(review): IEMOP_BODY_BINARY_rv_rm is defined elsewhere in this file. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1, 0);
}
918
919
920/**
921 * @opcode 0x0c
922 * @opgroup og_gen_arith_bin
923 * @opflmodify cf,pf,af,zf,sf,of
924 * @opflundef af
925 * @opflclear of,cf
926 * @opcopytests iemOp_or_Eb_Gb
927 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* AF is architecturally undefined after OR; tell the verifier so. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
}
934
935
936/**
937 * @opcode 0x0d
938 * @opgroup og_gen_arith_bin
939 * @opflmodify cf,pf,af,zf,sf,of
940 * @opflundef af
941 * @opflclear of,cf
942 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
943 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
944 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
945 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
946 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
947 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
948 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
949 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX,Iz - accumulator form; operand size selects AX/EAX/RAX and the immediate width. */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after OR. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
956
957
958/**
959 * @opcode 0x0e
960 * @opgroup og_stack_sreg
961 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS - invalid in 64-bit mode (opcode reused by VEX/other encodings there). */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
968
969
970/**
971 * @opcode 0x0f
972 * @opmnemonic EscTwo0f
973 * @openc two0f
974 * @opdisenum OP_2B_ESC
975 * @ophints harmless
976 * @opgroup og_escapes
977 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    /* 0x0f escape byte: dispatch into the two-byte opcode map on 286+; on the
       8086/186 target this byte decodes as POP CS instead. */
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        /* The two-byte map has 4 entries per opcode, selected by the operand prefix index. */
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_CS),
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
1015
1016/**
1017 * @opcode 0x10
1018 * @opgroup og_gen_arith_bin
1019 * @opfltest cf
1020 * @opflmodify cf,pf,af,zf,sf,of
1021 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1022 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1023 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1024 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1025 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1026 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC Eb,Gb - memory/register destination form; LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_adc_u8_locked);
}
1033
1034
1035/**
1036 * @opcode 0x11
1037 * @opgroup og_gen_arith_bin
1038 * @opfltest cf
1039 * @opflmodify cf,pf,af,zf,sf,of
1040 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1041 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1042 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1043 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1044 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1045 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC Ev,Gv - memory/register destination form (16/32/64-bit). */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1052
1053
1054/**
1055 * @opcode 0x12
1056 * @opgroup og_gen_arith_bin
1057 * @opfltest cf
1058 * @opflmodify cf,pf,af,zf,sf,of
1059 * @opcopytests iemOp_adc_Eb_Gb
1060 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC Gb,Eb - register destination form (no LOCK). */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
1066
1067
1068/**
1069 * @opcode 0x13
1070 * @opgroup og_gen_arith_bin
1071 * @opfltest cf
1072 * @opflmodify cf,pf,af,zf,sf,of
1073 * @opcopytests iemOp_adc_Ev_Gv
1074 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC Gv,Ev - register destination form (16/32/64-bit). */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1, 0);
}
1080
1081
1082/**
1083 * @opcode 0x14
1084 * @opgroup og_gen_arith_bin
1085 * @opfltest cf
1086 * @opflmodify cf,pf,af,zf,sf,of
1087 * @opcopytests iemOp_adc_Eb_Gb
1088 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL,Ib - fixed 8-bit accumulator form. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
1094
1095
1096/**
1097 * @opcode 0x15
1098 * @opgroup og_gen_arith_bin
1099 * @opfltest cf
1100 * @opflmodify cf,pf,af,zf,sf,of
1101 * @opcopytests iemOp_adc_Ev_Gv
1102 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX,Iz - accumulator form; operand size selects AX/EAX/RAX and immediate width. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1108
1109
1110/**
1111 * @opcode 0x16
1112 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1119
1120
1121/**
1122 * @opcode 0x17
1123 * @opgroup og_gen_arith_bin
1124 * @opfltest cf
1125 * @opflmodify cf,pf,af,zf,sf,of
1126 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS - also inhibits interrupts/debug traps on the following
       instruction, hence IEM_CIMPL_F_INHIBIT_SHADOW and DISOPTYPE_INHIBIT_IRQS. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    /* Flushes SP plus the SS selector/base/limit shadows the CIMPL may rewrite. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_SS),
                                iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1139
1140
1141/**
1142 * @opcode 0x18
1143 * @opgroup og_gen_arith_bin
1144 * @opfltest cf
1145 * @opflmodify cf,pf,af,zf,sf,of
1146 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB Eb,Gb - memory/register destination form; LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sbb_u8_locked);
}
1153
1154
1155/**
1156 * @opcode 0x19
1157 * @opgroup og_gen_arith_bin
1158 * @opfltest cf
1159 * @opflmodify cf,pf,af,zf,sf,of
1160 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB Ev,Gv - memory/register destination form (16/32/64-bit). */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1167
1168
1169/**
1170 * @opcode 0x1a
1171 * @opgroup og_gen_arith_bin
1172 * @opfltest cf
1173 * @opflmodify cf,pf,af,zf,sf,of
1174 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB Gb,Eb - register destination form (no LOCK). */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
1180
1181
1182/**
1183 * @opcode 0x1b
1184 * @opgroup og_gen_arith_bin
1185 * @opfltest cf
1186 * @opflmodify cf,pf,af,zf,sf,of
1187 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB Gv,Ev - register destination form (16/32/64-bit). */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1, 0);
}
1193
1194
1195/**
1196 * @opcode 0x1c
1197 * @opgroup og_gen_arith_bin
1198 * @opfltest cf
1199 * @opflmodify cf,pf,af,zf,sf,of
1200 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL,Ib - fixed 8-bit accumulator form. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
1206
1207
1208/**
1209 * @opcode 0x1d
1210 * @opgroup og_gen_arith_bin
1211 * @opfltest cf
1212 * @opflmodify cf,pf,af,zf,sf,of
1213 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX,Iz - accumulator form; operand size selects AX/EAX/RAX and immediate width. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1219
1220
1221/**
1222 * @opcode 0x1e
1223 * @opgroup og_stack_sreg
1224 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1231
1232
1233/**
1234 * @opcode 0x1f
1235 * @opgroup og_stack_sreg
1236 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    /* Flushes SP plus the DS selector/base/limit shadows the CIMPL may rewrite. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS),
                                iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1249
1250
1251/**
1252 * @opcode 0x20
1253 * @opgroup og_gen_arith_bin
1254 * @opflmodify cf,pf,af,zf,sf,of
1255 * @opflundef af
1256 * @opflclear of,cf
1257 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND Eb,Gb - memory/register destination form; LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_and_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_and_u8_locked);
}
1265
1266
1267/**
1268 * @opcode 0x21
1269 * @opgroup og_gen_arith_bin
1270 * @opflmodify cf,pf,af,zf,sf,of
1271 * @opflundef af
1272 * @opflclear of,cf
1273 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND Ev,Gv - memory/register destination form (16/32/64-bit). */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1281
1282
1283/**
1284 * @opcode 0x22
1285 * @opgroup og_gen_arith_bin
1286 * @opflmodify cf,pf,af,zf,sf,of
1287 * @opflundef af
1288 * @opflclear of,cf
1289 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND Gb,Eb - register destination form (no LOCK). */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1296
1297
1298/**
1299 * @opcode 0x23
1300 * @opgroup og_gen_arith_bin
1301 * @opflmodify cf,pf,af,zf,sf,of
1302 * @opflundef af
1303 * @opflclear of,cf
1304 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND Gv,Ev - register destination form (16/32/64-bit). */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1, 0);
}
1311
1312
1313/**
1314 * @opcode 0x24
1315 * @opgroup og_gen_arith_bin
1316 * @opflmodify cf,pf,af,zf,sf,of
1317 * @opflundef af
1318 * @opflclear of,cf
1319 */
1320FNIEMOP_DEF(iemOp_and_Al_Ib)
1321{
1322 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1323 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1324 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1325}
1326
1327
1328/**
1329 * @opcode 0x25
1330 * @opgroup og_gen_arith_bin
1331 * @opflmodify cf,pf,af,zf,sf,of
1332 * @opflundef af
1333 * @opflclear of,cf
1334 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX,Iz - accumulator form; operand size selects AX/EAX/RAX and immediate width. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1341
1342
1343/**
1344 * @opcode 0x26
1345 * @opmnemonic SEG
1346 * @op1 ES
1347 * @opgroup og_prefix
1348 * @openc prefix
1349 * @opdisenum OP_SEG
1350 * @ophints harmless
1351 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record it, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg    = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1361
1362
1363/**
1364 * @opcode 0x27
1365 * @opfltest af,cf
1366 * @opflmodify cf,pf,af,zf,sf,of
1367 * @opflundef of
1368 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA - decimal adjust AL after addition; invalid in 64-bit mode. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after DAA (see @opflundef). */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_daa);
}
1377
1378
1379/**
1380 * @opcode 0x28
1381 * @opgroup og_gen_arith_bin
1382 * @opflmodify cf,pf,af,zf,sf,of
1383 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB Eb,Gb - memory/register destination form; LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sub_u8_locked);
}
1390
1391
1392/**
1393 * @opcode 0x29
1394 * @opgroup og_gen_arith_bin
1395 * @opflmodify cf,pf,af,zf,sf,of
1396 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB Ev,Gv - memory/register destination form (16/32/64-bit). */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1403
1404
1405/**
1406 * @opcode 0x2a
1407 * @opgroup og_gen_arith_bin
1408 * @opflmodify cf,pf,af,zf,sf,of
1409 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB Gb,Eb - register destination form (no LOCK). */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1415
1416
1417/**
1418 * @opcode 0x2b
1419 * @opgroup og_gen_arith_bin
1420 * @opflmodify cf,pf,af,zf,sf,of
1421 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB Gv,Ev - register destination form (16/32/64-bit). */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1, 0);
}
1427
1428
1429/**
1430 * @opcode 0x2c
1431 * @opgroup og_gen_arith_bin
1432 * @opflmodify cf,pf,af,zf,sf,of
1433 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL,Ib - fixed 8-bit accumulator form. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1439
1440
1441/**
1442 * @opcode 0x2d
1443 * @opgroup og_gen_arith_bin
1444 * @opflmodify cf,pf,af,zf,sf,of
1445 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX,Iz - accumulator form; operand size selects AX/EAX/RAX and immediate width. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1451
1452
1453/**
1454 * @opcode 0x2e
1455 * @opmnemonic SEG
1456 * @op1 CS
1457 * @opgroup og_prefix
1458 * @openc prefix
1459 * @opdisenum OP_SEG
1460 * @ophints harmless
1461 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record it, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1471
1472
1473/**
1474 * @opcode 0x2f
1475 * @opfltest af,cf
1476 * @opflmodify cf,pf,af,zf,sf,of
1477 * @opflundef of
1478 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS - decimal adjust AL after subtraction; invalid in 64-bit mode. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after DAS (see @opflundef). */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_das);
}
1487
1488
1489/**
1490 * @opcode 0x30
1491 * @opgroup og_gen_arith_bin
1492 * @opflmodify cf,pf,af,zf,sf,of
1493 * @opflundef af
1494 * @opflclear of,cf
1495 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR Eb,Gb - memory/register destination form; LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_xor_u8_locked);
}
1503
1504
1505/**
1506 * @opcode 0x31
1507 * @opgroup og_gen_arith_bin
1508 * @opflmodify cf,pf,af,zf,sf,of
1509 * @opflundef af
1510 * @opflclear of,cf
1511 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR Ev,Gv - memory/register destination form (16/32/64-bit). */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1519
1520
1521/**
1522 * @opcode 0x32
1523 * @opgroup og_gen_arith_bin
1524 * @opflmodify cf,pf,af,zf,sf,of
1525 * @opflundef af
1526 * @opflclear of,cf
1527 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR Gb,Eb - register destination form (no LOCK). */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1534
1535
1536/**
1537 * @opcode 0x33
1538 * @opgroup og_gen_arith_bin
1539 * @opflmodify cf,pf,af,zf,sf,of
1540 * @opflundef af
1541 * @opflclear of,cf
1542 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR Gv,Ev - register destination form (16/32/64-bit). */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1, 0);
}
1549
1550
1551/**
1552 * @opcode 0x34
1553 * @opgroup og_gen_arith_bin
1554 * @opflmodify cf,pf,af,zf,sf,of
1555 * @opflundef af
1556 * @opflclear of,cf
1557 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL,Ib - fixed 8-bit accumulator form. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1564
1565
1566/**
1567 * @opcode 0x35
1568 * @opgroup og_gen_arith_bin
1569 * @opflmodify cf,pf,af,zf,sf,of
1570 * @opflundef af
1571 * @opflclear of,cf
1572 */
1573FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1574{
1575 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1576 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1577 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1578}
1579
1580
1581/**
1582 * @opcode 0x36
1583 * @opmnemonic SEG
1584 * @op1 SS
1585 * @opgroup og_prefix
1586 * @openc prefix
1587 * @opdisenum OP_SEG
1588 * @ophints harmless
1589 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record it, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1599
1600
1601/**
1602 * @opcode 0x37
1603 * @opfltest af,cf
1604 * @opflmodify cf,pf,af,zf,sf,of
1605 * @opflundef pf,zf,sf,of
1606 * @opgroup og_gen_arith_dec
1607 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1608 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1609 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1610 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1611 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1612 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1613 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1614 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1615 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1616 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1617 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1618 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1619 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1620 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1621 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1622 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1623 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1624 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1625 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1626 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1627 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1628 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1629 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1630 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1631 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1632 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1633 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1634 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1635 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1636 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1637 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1638 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA - ASCII adjust AL after addition; invalid in 64-bit mode. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is one of the flags marked undefined above. */

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aaa);
}
1648
1649
1650/**
1651 * @opcode 0x38
1652 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP Eb,Gb - read-only destination (compare only sets flags), so LOCK is not allowed. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
1659
1660
1661/**
1662 * @opcode 0x39
1663 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP Ev,Gv - read-only destination (compare only sets flags). */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
1669
1670
1671/**
1672 * @opcode 0x3a
1673 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP Gb,Eb - register first operand form. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1679
1680
1681/**
1682 * @opcode 0x3b
1683 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP Gv,Ev - register first operand form; 5th arg 0 = no register write-back. */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0, 0);
}
1689
1690
1691/**
1692 * @opcode 0x3c
1693 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL,Ib - fixed 8-bit accumulator form. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1699
1700
1701/**
1702 * @opcode 0x3d
1703 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX,Iz - accumulator form; last arg 0 = no register write-back. */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1709
1710
1711/**
1712 * @opcode 0x3e
1713 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record it, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1723
1724
1725/**
1726 * @opcode 0x3f
1727 * @opfltest af,cf
1728 * @opflmodify cf,pf,af,zf,sf,of
1729 * @opflundef pf,zf,sf,of
1730 * @opgroup og_gen_arith_dec
1731 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1732 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1733 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1734 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1735 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1736 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1737 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1738 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1739 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1740 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1741 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1742 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1743 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1744 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1745 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1746 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1747 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1748 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1749 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1750 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1751 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1752 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1753 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1754 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1755 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1756 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1757 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1758 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1759 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1760 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1761 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1762 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1763 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1764 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1765 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1766 */
1767FNIEMOP_DEF(iemOp_aas)
1768{
1769 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1770 IEMOP_HLP_NO_64BIT();
1771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1772 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1773
1774 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aas);
1775}
1776
1777
1778/**
1779 * Common 'inc/dec register' helper.
1780 *
1781 * Not for 64-bit code, only for what became the rex prefixes.
1782 */
#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
    /* Dispatch on effective operand size; 64-bit mode never reaches this \
       (0x40..0x4f decode as REX prefixes there), hence no 64-bit case. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *,  pu16Dst, 0); \
            IEM_MC_ARG(uint32_t *,  pEFlags, 1); \
            IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0); \
            IEM_MC_ARG(uint32_t *,  pEFlags, 1); \
            IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
            IEM_MC_CLEAR_HIGH_GREG_U64(a_iReg); /* 32-bit writes clear bits 63:32 of the GREG */ \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
1813
1814/**
1815 * @opcode 0x40
1816 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /* 0x40: INC eAX in 16/32-bit code; plain REX prefix in 64-bit code. */
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* decode the opcode following the prefix */
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1834
1835
1836/**
1837 * @opcode 0x41
1838 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /* 0x41: INC eCX in 16/32-bit code; REX.B prefix in 64-bit code. */
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB     = 1 << 3; /* extends the r/m / base register field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1857
1858
1859/**
1860 * @opcode 0x42
1861 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /* 0x42: INC eDX in 16/32-bit code; REX.X prefix in 64-bit code. */
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* extends the SIB index register field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1880
1881
1882
1883/**
1884 * @opcode 0x43
1885 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /* 0x43: INC eBX in 16/32-bit code; REX.BX prefix in 64-bit code. */
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1905
1906
1907/**
1908 * @opcode 0x44
1909 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /* 0x44: INC eSP in 16/32-bit code; REX.R prefix in 64-bit code. */
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* extends the ModR/M reg field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1928
1929
1930/**
1931 * @opcode 0x45
1932 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /* 0x45: INC eBP in 16/32-bit code; REX.RB prefix in 64-bit code. */
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1952
1953
1954/**
1955 * @opcode 0x46
1956 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Record REX.R and REX.X and re-dispatch the next byte as the real opcode. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode 0x46 is the 16/32-bit 'inc si/esi' instruction. */
    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
1976
1977
1978/**
1979 * @opcode 0x47
1980 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Record REX.R, REX.B and REX.X and re-dispatch the next byte as the real opcode. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode 0x47 is the 16/32-bit 'inc di/edi' instruction. */
    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
2001
2002
2003/**
2004 * @opcode 0x48
2005 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.W promotes the operand size, so the effective operand size must be
           recalculated before the following byte is dispatched as the real opcode. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode 0x48 is the 16/32-bit 'dec ax/eax' instruction. */
    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
}
2024
2025
2026/**
2027 * @opcode 0x49
2028 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Record REX.B; REX.W requires recalculating the effective operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode 0x49 is the 16/32-bit 'dec cx/ecx' instruction. */
    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
}
2048
2049
2050/**
2051 * @opcode 0x4a
2052 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Record REX.X; REX.W requires recalculating the effective operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode 0x4a is the 16/32-bit 'dec dx/edx' instruction. */
    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
}
2072
2073
2074/**
2075 * @opcode 0x4b
2076 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Record REX.B and REX.X; REX.W requires recalculating the effective operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode 0x4b is the 16/32-bit 'dec bx/ebx' instruction. */
    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
}
2097
2098
2099/**
2100 * @opcode 0x4c
2101 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Record REX.R; REX.W requires recalculating the effective operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode 0x4c is the 16/32-bit 'dec sp/esp' instruction. */
    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
}
2121
2122
2123/**
2124 * @opcode 0x4d
2125 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Record REX.R and REX.B; REX.W requires recalculating the effective operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB   = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode 0x4d is the 16/32-bit 'dec bp/ebp' instruction. */
    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
}
2146
2147
2148/**
2149 * @opcode 0x4e
2150 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Record REX.R and REX.X; REX.W requires recalculating the effective operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode 0x4e is the 16/32-bit 'dec si/esi' instruction. */
    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
}
2171
2172
2173/**
2174 * @opcode 0x4f
2175 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Full REX prefix (all four bits): record R, B and X and recalculate the
           effective operand size for the W bit before re-dispatching. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode 0x4f is the 16/32-bit 'dec di/edi' instruction. */
    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
}
2197
2198
2199/**
2200 * Common 'push register' helper.
2201 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.B extends the register index (0x50+r reaches r8-r15).  In long mode
           the push defaults to a 64-bit operand; the 66h prefix selects 16-bit
           (there is no 32-bit push in 64-bit mode). */
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* Fetch the register value at the effective operand size and push it. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2246
2247
2248/**
2249 * @opcode 0x50
2250 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    /* 0x50: push rAX (or r8 with REX.B); shared register-push body. */
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
2256
2257
2258/**
2259 * @opcode 0x51
2260 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    /* 0x51: push rCX (or r9 with REX.B); shared register-push body. */
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
2266
2267
2268/**
2269 * @opcode 0x52
2270 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    /* 0x52: push rDX (or r10 with REX.B); shared register-push body. */
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
2276
2277
2278/**
2279 * @opcode 0x53
2280 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    /* 0x53: push rBX (or r11 with REX.B); shared register-push body. */
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
2286
2287
2288/**
2289 * @opcode 0x54
2290 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    /* The 8086/8088 pushes the already-decremented SP value (SP-2), unlike
       80186 and later which push the value SP had before the instruction. */
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        IEM_MC_BEGIN(0, 1, IEM_MC_F_ONLY_8086, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
2307
2308
2309/**
2310 * @opcode 0x55
2311 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    /* 0x55: push rBP (or r13 with REX.B); shared register-push body. */
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
2317
2318
2319/**
2320 * @opcode 0x56
2321 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    /* 0x56: push rSI (or r14 with REX.B); shared register-push body. */
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
2327
2328
2329/**
2330 * @opcode 0x57
2331 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    /* 0x57: push rDI (or r15 with REX.B); shared register-push body. */
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
2337
2338
2339/**
2340 * Common 'pop register' helper.
2341 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.B extends the register index (0x58+r reaches r8-r15).  In long mode
           the pop defaults to a 64-bit operand; the 66h prefix selects 16-bit
           (there is no 32-bit pop in 64-bit mode).  Note: 'pop rSP' itself is
           special-cased by the caller (iemOp_pop_eSP). */
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* Pop straight into a reference to the destination register. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64(iReg); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2387
2388
2389/**
2390 * @opcode 0x58
2391 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    /* 0x58: pop rAX (or r8 with REX.B); shared register-pop body. */
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
2397
2398
2399/**
2400 * @opcode 0x59
2401 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    /* 0x59: pop rCX (or r9 with REX.B); shared register-pop body. */
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
2407
2408
2409/**
2410 * @opcode 0x5a
2411 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    /* 0x5a: pop rDX (or r10 with REX.B); shared register-pop body. */
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
2417
2418
2419/**
2420 * @opcode 0x5b
2421 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    /* 0x5b: pop rBX (or r11 with REX.B); shared register-pop body. */
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
2427
2428
2429/**
2430 * @opcode 0x5c
2431 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* With REX.B this is 'pop r12', which has no special SP semantics and
           can use the common helper.  Otherwise set up the 64-bit default
           operand size before the special 'pop rSP' handling below. */
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* 'pop rSP' is special: the value read from the stack replaces SP after the
       increment, so pop into a local and store it to SP afterwards. */
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2482
2483
2484/**
2485 * @opcode 0x5d
2486 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    /* 0x5d: pop rBP (or r13 with REX.B); shared register-pop body. */
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
2492
2493
2494/**
2495 * @opcode 0x5e
2496 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    /* 0x5e: pop rSI (or r14 with REX.B); shared register-pop body. */
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
2502
2503
2504/**
2505 * @opcode 0x5f
2506 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    /* 0x5f: pop rDI (or r15 with REX.B); shared register-pop body. */
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
2512
2513
2514/**
2515 * @opcode 0x60
2516 */
FNIEMOP_DEF(iemOp_pusha)
{
    /* Pushes all eight general registers; 186+, invalid in 64-bit mode.
       Deferred to a C implementation; only SP is modified (flush mask). */
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_32);
}
2527
2528
2529/**
2530 * @opcode 0x61
2531 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    /* 0x61 is POPA outside 64-bit mode and the (unsupported) MVEX prefix in
       64-bit mode.  POPA writes all eight GPRs, hence the full flush mask. */
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                        iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                    iemCImpl_popa_32);
    }
    /* 64-bit mode: the MVEX (Xeon Phi) prefix encoding is not supported. */
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
2566
2567
2568/**
2569 * @opcode 0x62
2570 * @opmnemonic bound
2571 * @op1 Gv_RO
2572 * @op2 Ma
2573 * @opmincpu 80186
2574 * @ophints harmless x86_invalid_64
2575 * @optest op1=0 op2=0 ->
2576 * @optest op1=1 op2=0 -> value.xcpt=5
2577 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2578 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2579 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2580 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2581 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2582 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2583 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2584 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2585 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2586 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2587 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2588 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2589 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2590 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2591 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2592 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2593 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2594 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2595 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2596 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2597 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2598 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2599 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2600 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2601 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2602 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2603 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2604 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2605 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2606 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2607 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2608 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2609 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2610 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2611 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2612 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2613 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2614 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2615 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2616 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2617 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2618 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2619 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set.  In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X.  Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /** @todo testcase: check that there are two memory accesses involved. Check
             * whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* The bounds pair is read as two adjacent words at m16&16. */
                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* The bounds pair is read as two adjacent dwords at m32&32. */
                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         * does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX prefix path: consume the remaining payload bytes (P2, P3), but the
       actual prefix decoding is not implemented yet. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2707
2708
2709/** Opcode 0x63 - non-64-bit modes. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    /* ARPL: adjust the RPL field of the destination selector (Ew) up to the RPL
       of the source (Gw), setting ZF when an adjustment was made.  286+,
       protected mode only (0x63 is MOVSXD in 64-bit mode, handled elsewhere). */
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
        IEM_MC_ARG(uint16_t,        u16Src,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory: read-modify-write mapping of the destination word. */
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEM_MC_ARG(uint16_t *,                  pu16Dst,          0);
        IEM_MC_ARG(uint16_t,                    u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(                pEFlags, EFlags,  2);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,                   bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2757
2758
2759/**
2760 * @opcode 0x63
2761 *
2762 * @note This is a weird one. It works like a regular move instruction if
2763 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2764 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the REX.W form (sign-extend 32-bit source into 64-bit destination)
       is implemented; the no-REX.W variants assert below. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register.
             */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory.
             */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
2806
2807
2808/**
2809 * @opcode 0x64
2810 * @opmnemonic segfs
2811 * @opmincpu 80386
2812 * @opgroup og_prefixes
2813 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    /* FS segment-override prefix: record it, make FS the effective segment for
       memory operands, then re-dispatch the next byte as the real opcode. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2825
2826
2827/**
2828 * @opcode 0x65
2829 * @opmnemonic seggs
2830 * @opmincpu 80386
2831 * @opgroup og_prefixes
2832 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    /* GS segment-override prefix: record it, make GS the effective segment for
       memory operands, then re-dispatch the next byte as the real opcode. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2844
2845
2846/**
2847 * @opcode 0x66
2848 * @opmnemonic opsize
2849 * @openc prefix
2850 * @opmincpu 80386
2851 * @ophints harmless
2852 * @opgroup og_prefixes
2853 */
FNIEMOP_DEF(iemOp_op_size)
{
    /* Operand-size override prefix (66h): toggles the effective operand size
       and re-dispatches the next byte as the real opcode. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2870
2871
2872/**
2873 * @opcode 0x67
2874 * @opmnemonic addrsize
2875 * @openc prefix
2876 * @opmincpu 80386
2877 * @ophints harmless
2878 * @opgroup og_prefixes
2879 */
FNIEMOP_DEF(iemOp_addr_size)
{
    /* Address-size override prefix (67h): toggles 16<->32-bit addressing, or
       selects 32-bit addressing in 64-bit mode, then re-dispatches. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2897
2898
2899/**
2900 * @opcode 0x68
2901 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    /* Push an immediate of operand size (Iz: word/dword; sign-extended dword in
       64-bit mode).  186+, operand size defaults to 64-bit in long mode. */
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            /* The immediate is a 32-bit value sign-extended to 64 bits. */
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2939
2940
2941/**
2942 * @opcode 0x69
2943 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    /* Three-operand signed multiply: Gv = Ev * Iz (immediate of operand size,
       sign-extended to 64-bit in long mode). */
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* IMUL leaves SF, ZF, AF and PF in an undefined state; tell the
       verification logic to ignore them. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            /* Pick the EFLAGS-behavior specific assembly worker up front; it is
               shared by the register and memory operand paths below. */
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                /* The worker multiplies in place, so work on a local copy of
                   the source register and store the result into Gv afterwards. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* cbImmAndRspOffset=2: a 16-bit immediate follows the ModRM bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* cbImmAndRspOffset=4: a 32-bit immediate follows the ModRM bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                /* The 32-bit immediate is sign-extended to 64 bits while decoding. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();          /* parameter count for the threaded function for this block. */

                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
                /* Sign-extend the immediate here instead (see note above). */
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int32_t)u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3103
3104
3105/**
3106 * @opcode 0x6a
3107 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    /* PUSH imm8: the signed byte immediate is sign-extended to the effective
       operand size (the C integer conversion in the IEM_MC_PUSH_UXX call does
       the extension). */
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    /* In 64-bit mode the default operand size for pushes is 64-bit. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U16(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U32(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U64(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3141
3142
3143/**
3144 * @opcode 0x6b
3145 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    /* Three-operand signed multiply with a byte immediate:
       Gv = Ev * sign-extended Ib. */
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* IMUL leaves SF, ZF, AF and PF in an undefined state. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            /* Same EFLAGS-behavior specific workers as the 0x69 (Iz) variant. */
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* The (int8_t) cast sign-extends the immediate to 16 bits. */
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                /* Multiply into a local copy, then store the result into Gv. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);

                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* cbImmAndRspOffset=1: one immediate byte follows the ModRM bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);

                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the threaded parameter count. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
                /* Sign-extend the immediate here instead (see note above). */
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3306
3307
3308/**
3309 * @opcode 0x6c
3310 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    /* INSB: input byte(s) from port DX to ES:[e/r]DI.  Both the plain and the
       REP-prefixed forms are deferred to C implementations, selected by the
       effective address size.  The register mask tells the recompiler which
       guest register shadows the C code may modify (xDI, plus xCX for REP). */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Either REP prefix byte (F2h/F3h) selects the repeated form here. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr64, false); /* final arg: presumably fIoChecked — TODO confirm against iemCImpl_rep_ins_op8_addr64 */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3359
3360
3361/**
3362 * @opcode 0x6d
3363 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    /* INSW/INSD: input word/dword(s) from port DX to ES:[e/r]DI.  Dispatches
       on operand size (outer switch) and address size (inner switch) to the
       matching C implementation.  64-bit operand size deliberately falls into
       the 32-bit case (see the shared case labels below). */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr64, false); /* final arg: presumably fIoChecked — TODO confirm */
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the op32 workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the op32 workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3465
3466
3467/**
3468 * @opcode 0x6e
3469 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    /* OUTSB: output byte(s) from iEffSeg:[e/r]SI to port DX.  Deferred to C
       implementations picked by effective address size; the source segment is
       passed as a parameter since OUTS honours segment override prefixes.
       The register mask covers xSI (and xCX for the REP forms). */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false); /* final arg: presumably fIoChecked — TODO confirm */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3518
3519
3520/**
3521 * @opcode 0x6f
3522 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    /* OUTSW/OUTSD: output word/dword(s) from iEffSeg:[e/r]SI to port DX.
       Dispatch mirrors iemOp_inswd_Yv_DX: operand size (outer) and address
       size (inner) select the C worker; 64-bit operand size falls into the
       32-bit case.  The effective segment is passed along for the override. */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false); /* final arg: presumably fIoChecked — TODO confirm */
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the op32 workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the op32 workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3624
3625
3626/**
3627 * @opcode 0x70
3628 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    /* JO rel8: jump short if OF is set. */
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3644
3645
3646/**
3647 * @opcode 0x71
3648 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    /* JNO rel8: jump short if OF is clear.  Tested on OF set, so the jump is
       taken in the ELSE arm. */
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3664
3665/**
3666 * @opcode 0x72
3667 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    /* JC/JB/JNAE rel8: jump short if CF is set. */
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3683
3684
3685/**
3686 * @opcode 0x73
3687 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    /* JNC/JNB/JAE rel8: jump short if CF is clear (taken in the ELSE arm). */
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3703
3704
3705/**
3706 * @opcode 0x74
3707 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    /* JE/JZ rel8: jump short if ZF is set. */
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3723
3724
3725/**
3726 * @opcode 0x75
3727 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    /* JNE/JNZ rel8: jump short if ZF is clear (taken in the ELSE arm). */
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3743
3744
3745/**
3746 * @opcode 0x76
3747 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    /* JBE/JNA rel8: jump short if CF or ZF is set (unsigned below-or-equal). */
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3763
3764
3765/**
3766 * @opcode 0x77
3767 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    /* JA/JNBE rel8: jump short if both CF and ZF are clear (unsigned above);
       taken in the ELSE arm. */
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3783
3784
3785/**
3786 * @opcode 0x78
3787 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    /* JS rel8: jump short if SF is set. */
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3803
3804
3805/**
3806 * @opcode 0x79
3807 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    /* JNS rel8: jump short if SF is clear (taken in the ELSE arm). */
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3823
3824
3825/**
3826 * @opcode 0x7a
3827 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    /* JP/JPE rel8: jump short if PF is set. */
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3843
3844
3845/**
3846 * @opcode 0x7b
3847 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    /* JNP/JPO rel8: jump short if PF is clear (taken in the ELSE arm). */
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3863
3864
3865/**
3866 * @opcode 0x7c
3867 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    /* JL/JNGE rel8: jump short if SF != OF (signed less). */
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3883
3884
3885/**
3886 * @opcode 0x7d
3887 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    /* JGE/JNL rel8: jump short if SF == OF (signed greater-or-equal); the
       condition tests SF != OF, so the jump is taken in the ELSE arm. */
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3903
3904
3905/**
3906 * @opcode 0x7e
3907 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    /* JLE/JNG rel8: jump short if ZF is set or SF != OF (signed less-or-equal). */
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3923
3924
3925/**
3926 * @opcode 0x7f
3927 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    /* JG/JNLE rel8: jump short if ZF is clear and SF == OF (signed greater);
       the condition tests the complement, so the jump is in the ELSE arm. */
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3943
3944
3945/**
3946 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
3947 * iemOp_Grp1_Eb_Ib_80.
3948 */
/*
 * Note! This macro handles the register target and the non-LOCK memory target,
 *       then deliberately leaves the memory-target 'else' branch OPEN (ending
 *       in the '(void)0' placeholder).  It must be followed by one of the
 *       companion IEMOP_BODY_BINARY_Eb_Ib_* macros, which supplies the locked
 *       path and closes the braces.  Do not use it on its own.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
3995
/**
 * Continuation of IEMOP_BODY_BINARY_Eb_Ib_RW: the LOCK-prefixed memory path.
 * Emits the atomic worker call and closes the two brace levels left open by
 * the _RW macro.
 *
 * a_fnLockedU8: locked/atomic 8-bit worker taking (pu8Dst, u8Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            /* cbImm=1: one immediate byte follows the ModR/M encoding. */ \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
4019
/**
 * Read-only variant of IEMOP_BODY_BINARY_Eb_Ib_RW, used for CMP where the
 * destination operand is only read (memory mapped RO, flags still updated).
 * Like the _RW variant it ends with an open else-block that must be closed
 * by IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK.
 *
 * a_fnNormalU8: non-locked 8-bit worker taking (pu8Dst, u8Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            /* cbImm=1: one immediate byte follows the ModR/M encoding. */ \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            /* RO mapping: the operand is never written back. */ \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
4066
/**
 * Continuation of IEMOP_BODY_BINARY_Eb_Ib_RO: rejects the LOCK prefix
 * (raises #UD) and closes the brace levels left open by the _RO macro.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4073
4074
4075
4076/**
4077 * @opmaps grp1_80,grp1_83
4078 * @opcode /0
4079 */
4080FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
4081{
4082 IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
4083 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_add_u8);
4084 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
4085}
4086
4087
4088/**
4089 * @opmaps grp1_80,grp1_83
4090 * @opcode /1
4091 */
4092FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
4093{
4094 IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
4095 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_or_u8);
4096 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
4097}
4098
4099
4100/**
4101 * @opmaps grp1_80,grp1_83
4102 * @opcode /2
4103 */
4104FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
4105{
4106 IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
4107 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_adc_u8);
4108 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
4109}
4110
4111
4112/**
4113 * @opmaps grp1_80,grp1_83
4114 * @opcode /3
4115 */
4116FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
4117{
4118 IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
4119 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sbb_u8);
4120 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
4121}
4122
4123
4124/**
4125 * @opmaps grp1_80,grp1_83
4126 * @opcode /4
4127 */
4128FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
4129{
4130 IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
4131 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_and_u8);
4132 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
4133}
4134
4135
4136/**
4137 * @opmaps grp1_80,grp1_83
4138 * @opcode /5
4139 */
4140FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
4141{
4142 IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
4143 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sub_u8);
4144 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
4145}
4146
4147
4148/**
4149 * @opmaps grp1_80,grp1_83
4150 * @opcode /6
4151 */
4152FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
4153{
4154 IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
4155 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_xor_u8);
4156 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
4157}
4158
4159
4160/**
4161 * @opmaps grp1_80,grp1_83
4162 * @opcode /7
4163 */
4164FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
4165{
4166 IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
4167 IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
4168 IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
4169}
4170
4171
4172/**
4173 * @opcode 0x80
4174 */
4175FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
4176{
4177 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4178 switch (IEM_GET_MODRM_REG_8(bRm))
4179 {
4180 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
4181 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib, bRm);
4182 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
4183 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
4184 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
4185 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
4186 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
4187 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
4188 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4189 }
4190}
4191
4192
4193/**
4194 * Body for a group 1 binary operator.
4195 */
4196#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4197 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4198 { \
4199 /* register target */ \
4200 switch (pVCpu->iem.s.enmEffOpSize) \
4201 { \
4202 case IEMMODE_16BIT: \
4203 { \
4204 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4205 IEM_MC_BEGIN(3, 0, 0, 0); \
4206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4207 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4208 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4209 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4210 \
4211 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4212 IEM_MC_REF_EFLAGS(pEFlags); \
4213 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4214 \
4215 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4216 IEM_MC_END(); \
4217 break; \
4218 } \
4219 \
4220 case IEMMODE_32BIT: \
4221 { \
4222 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4223 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4225 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4226 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4227 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4228 \
4229 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4230 IEM_MC_REF_EFLAGS(pEFlags); \
4231 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4232 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4233 \
4234 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4235 IEM_MC_END(); \
4236 break; \
4237 } \
4238 \
4239 case IEMMODE_64BIT: \
4240 { \
4241 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4242 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4243 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4244 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4245 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4246 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4247 \
4248 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4249 IEM_MC_REF_EFLAGS(pEFlags); \
4250 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4251 \
4252 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4253 IEM_MC_END(); \
4254 break; \
4255 } \
4256 \
4257 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4258 } \
4259 } \
4260 else \
4261 { \
4262 /* memory target */ \
4263 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4264 { \
4265 switch (pVCpu->iem.s.enmEffOpSize) \
4266 { \
4267 case IEMMODE_16BIT: \
4268 { \
4269 IEM_MC_BEGIN(3, 3, 0, 0); \
4270 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4271 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4272 \
4273 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4274 IEMOP_HLP_DONE_DECODING(); \
4275 \
4276 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4277 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4278 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4279 \
4280 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4281 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4282 IEM_MC_FETCH_EFLAGS(EFlags); \
4283 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4284 \
4285 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
4286 IEM_MC_COMMIT_EFLAGS(EFlags); \
4287 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4288 IEM_MC_END(); \
4289 break; \
4290 } \
4291 \
4292 case IEMMODE_32BIT: \
4293 { \
4294 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4295 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4296 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4297 \
4298 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4299 IEMOP_HLP_DONE_DECODING(); \
4300 \
4301 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4302 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4303 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4304 \
4305 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4306 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4307 IEM_MC_FETCH_EFLAGS(EFlags); \
4308 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4309 \
4310 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
4311 IEM_MC_COMMIT_EFLAGS(EFlags); \
4312 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4313 IEM_MC_END(); \
4314 break; \
4315 } \
4316 \
4317 case IEMMODE_64BIT: \
4318 { \
4319 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4320 \
4321 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4322 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4323 \
4324 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4325 IEMOP_HLP_DONE_DECODING(); \
4326 \
4327 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4328 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4329 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4330 \
4331 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4332 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4333 IEM_MC_FETCH_EFLAGS(EFlags); \
4334 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4335 \
4336 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
4337 IEM_MC_COMMIT_EFLAGS(EFlags); \
4338 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4339 IEM_MC_END(); \
4340 break; \
4341 } \
4342 \
4343 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4344 } \
4345 } \
4346 else \
4347 { \
4348 (void)0
/* This must be a separate macro due to parsing restrictions in IEMAllInstPython.py. */
/**
 * Continuation of IEMOP_BODY_BINARY_Ev_Iz_RW: the LOCK-prefixed memory path
 * for all three operand sizes.  Closes the brace levels left open by _RW.
 *
 * a_fnLockedU16/U32/U64: locked/atomic workers taking (puDst, uSrc, pEFlags).
 */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    /* cbImm=2: a word immediate follows the ModR/M encoding. */ \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    /* cbImm=4: a dword immediate follows the ModR/M encoding. */ \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    /* cbImm=4: the 64-bit form still only carries a dword immediate. */ \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4433
/* read-only version */
/**
 * Read-only variant of IEMOP_BODY_BINARY_Ev_Iz_RW, used for CMP: the memory
 * operand is mapped read-only and a LOCK prefix raises #UD.  Unlike the RW
 * variant, this macro is self-contained (no _LOCKED continuation needed).
 *
 * a_fnNormalU16/U32/U64: non-locked workers taking (puDst, uSrc, pEFlags).
 */
#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                /* No CLEAR_HIGH_GREG here: the destination register is not written. */ \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                /* Iz in 64-bit mode: 32-bit immediate sign-extended to 64 bits. */ \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    /* cbImm=2: a word immediate follows the ModR/M encoding. */ \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    /* cbImm=4: a dword immediate follows the ModR/M encoding. */ \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    /* cbImm=4: the 64-bit form still only carries a dword immediate. */ \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* LOCK prefix on a read-only operation is invalid (#UD). */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4590
4591
4592/**
4593 * @opmaps grp1_81
4594 * @opcode /0
4595 */
4596FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
4597{
4598 IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
4599 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
4600 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
4601}
4602
4603
4604/**
4605 * @opmaps grp1_81
4606 * @opcode /1
4607 */
4608FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
4609{
4610 IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
4611 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
4612 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
4613}
4614
4615
4616/**
4617 * @opmaps grp1_81
4618 * @opcode /2
4619 */
4620FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
4621{
4622 IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
4623 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
4624 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
4625}
4626
4627
4628/**
4629 * @opmaps grp1_81
4630 * @opcode /3
4631 */
4632FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
4633{
4634 IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
4635 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
4636 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
4637}
4638
4639
4640/**
4641 * @opmaps grp1_81
4642 * @opcode /4
4643 */
4644FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
4645{
4646 IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
4647 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
4648 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
4649}
4650
4651
4652/**
4653 * @opmaps grp1_81
4654 * @opcode /5
4655 */
4656FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
4657{
4658 IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
4659 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
4660 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
4661}
4662
4663
4664/**
4665 * @opmaps grp1_81
4666 * @opcode /6
4667 */
4668FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
4669{
4670 IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
4671 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
4672 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
4673}
4674
4675
4676/**
4677 * @opmaps grp1_81
4678 * @opcode /7
4679 */
4680FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
4681{
4682 IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
4683 IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
4684}
4685
4686
4687/**
4688 * @opcode 0x81
4689 */
4690FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
4691{
4692 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4693 switch (IEM_GET_MODRM_REG_8(bRm))
4694 {
4695 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
4696 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz, bRm);
4697 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
4698 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
4699 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
4700 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
4701 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
4702 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
4703 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4704 }
4705}
4706
4707
4708/**
4709 * @opcode 0x82
4710 * @opmnemonic grp1_82
4711 * @opgroup og_groups
4712 */
4713FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
4714{
4715 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
4716 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
4717}
4718
4719
4720/**
4721 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4722 * iemOp_Grp1_Ev_Ib.
4723 */
4724#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4725 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4726 { \
4727 /* \
4728 * Register target \
4729 */ \
4730 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4731 switch (pVCpu->iem.s.enmEffOpSize) \
4732 { \
4733 case IEMMODE_16BIT: \
4734 IEM_MC_BEGIN(3, 0, 0, 0); \
4735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4736 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4737 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
4738 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4739 \
4740 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4741 IEM_MC_REF_EFLAGS(pEFlags); \
4742 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4743 \
4744 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4745 IEM_MC_END(); \
4746 break; \
4747 \
4748 case IEMMODE_32BIT: \
4749 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4751 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4752 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
4753 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4754 \
4755 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4756 IEM_MC_REF_EFLAGS(pEFlags); \
4757 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4758 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4759 \
4760 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4761 IEM_MC_END(); \
4762 break; \
4763 \
4764 case IEMMODE_64BIT: \
4765 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4767 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4768 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
4769 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4770 \
4771 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4772 IEM_MC_REF_EFLAGS(pEFlags); \
4773 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4774 \
4775 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4776 IEM_MC_END(); \
4777 break; \
4778 \
4779 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4780 } \
4781 } \
4782 else \
4783 { \
4784 /* \
4785 * Memory target. \
4786 */ \
4787 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4788 { \
4789 switch (pVCpu->iem.s.enmEffOpSize) \
4790 { \
4791 case IEMMODE_16BIT: \
4792 IEM_MC_BEGIN(3, 3, 0, 0); \
4793 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4794 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4795 \
4796 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4797 IEMOP_HLP_DONE_DECODING(); \
4798 \
4799 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4800 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4801 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4802 \
4803 IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
4804 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4805 IEM_MC_FETCH_EFLAGS(EFlags); \
4806 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4807 \
4808 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
4809 IEM_MC_COMMIT_EFLAGS(EFlags); \
4810 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4811 IEM_MC_END(); \
4812 break; \
4813 \
4814 case IEMMODE_32BIT: \
4815 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4816 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4817 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4818 \
4819 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4820 IEMOP_HLP_DONE_DECODING(); \
4821 \
4822 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4823 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4824 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4825 \
4826 IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
4827 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4828 IEM_MC_FETCH_EFLAGS(EFlags); \
4829 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4830 \
4831 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
4832 IEM_MC_COMMIT_EFLAGS(EFlags); \
4833 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4834 IEM_MC_END(); \
4835 break; \
4836 \
4837 case IEMMODE_64BIT: \
4838 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4839 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4840 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4841 \
4842 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4843 IEMOP_HLP_DONE_DECODING(); \
4844 \
4845 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4846 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4847 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4848 \
4849 IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
4850 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4851 IEM_MC_FETCH_EFLAGS(EFlags); \
4852 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4853 \
4854 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
4855 IEM_MC_COMMIT_EFLAGS(EFlags); \
4856 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4857 IEM_MC_END(); \
4858 break; \
4859 \
4860 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4861 } \
4862 } \
4863 else \
4864 { \
4865 (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * Continuation of IEMOP_BODY_BINARY_Ev_Ib_RW: the LOCK-prefixed memory path
 * for all three operand sizes (byte immediate sign-extended).  Closes the
 * brace levels left open by the _RW macro.
 *
 * a_fnLockedU16/U32/U64: locked/atomic workers taking (puDst, uSrc, pEFlags).
 */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    /* cbImm=1: one immediate byte follows the ModR/M encoding. */ \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4944
/**
 * Read-only variant of the Ev,Ib binary-op body (used by CMP): the
 * destination is only read and the worker updates EFLAGS through pEFlags.
 * The 8-bit immediate is sign-extended to the effective operand size before
 * being passed to the 16/32/64-bit worker.  A memory destination is mapped
 * read-only; a LOCK prefix is rejected as invalid.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *,  pu16Dst,           0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,    u16Src, (int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(      pEFlags, EFlags,                2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *,  pu32Dst,           0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,    u32Src, (int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(      pEFlags, EFlags,                2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *,  pu64Dst,           0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,    u64Src, (int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(      pEFlags, EFlags,                2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* The LOCK prefix makes no sense with a read-only destination. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
5091
/**
 * @opmaps grp1_83
 * @opcode /0
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
    /* The _RW body expands the register/plain-memory forms with the normal
       workers; the _LOCKED body follows with the interlocked workers. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
5102
5103
/**
 * @opmaps grp1_83
 * @opcode /1
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
    /* Normal workers first (_RW body), interlocked workers second (_LOCKED body). */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
5114
5115
/**
 * @opmaps grp1_83
 * @opcode /2
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
    /* Normal workers first (_RW body), interlocked workers second (_LOCKED body). */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
5126
5127
/**
 * @opmaps grp1_83
 * @opcode /3
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
    /* Normal workers first (_RW body), interlocked workers second (_LOCKED body). */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
5138
5139
/**
 * @opmaps grp1_83
 * @opcode /4
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
    /* Normal workers first (_RW body), interlocked workers second (_LOCKED body). */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
5150
5151
/**
 * @opmaps grp1_83
 * @opcode /5
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
    /* Normal workers first (_RW body), interlocked workers second (_LOCKED body). */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
5162
5163
/**
 * @opmaps grp1_83
 * @opcode /6
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
    /* Normal workers first (_RW body), interlocked workers second (_LOCKED body). */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
5174
5175
/**
 * @opmaps grp1_83
 * @opcode /7
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
    /* CMP only reads the destination, so the read-only body (no locked variant) is used. */
    IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
5185
5186
/**
 * @opcode 0x83
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
             to the 386 even if absent in the intel reference manuals and some
             3rd party opcode listings. */
    /* Group 1 with a sign-extended byte immediate: the actual operation is
       selected by the reg field of the ModR/M byte. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5209
5210
/**
 * @opcode 0x84
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by TEST. */
    /* TEST only reads its destination, so the read-only body is used and
       no locked variant exists. */
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
5221
5222
/**
 * @opcode 0x85
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by TEST. */
    /* TEST only reads its destination; read-only body, one worker per operand size. */
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64);
}
5232
5233
/**
 * @opcode 0x86
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register-to-register exchange: fetch both, cross-store both. */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(2, 4, 0, 0);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,  bUnmapInfo);
        IEM_MC_LOCAL(uint8_t,  uTmpReg);
        IEM_MC_ARG(uint8_t *,  pu8Mem,          0);
        IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP_U8_RW(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* XCHG with memory uses the locked worker unless the VM is
           configured to disregard locking (IEM_F_X86_DISREGARD_LOCK). */
        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
        else
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Mem, bUnmapInfo);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5287
5288
/**
 * @opcode 0x87
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register-to-register exchange: fetch both, cross-store both. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* In each size variant the memory operand is mapped read-write and
           the locked worker is used unless IEM_F_X86_DISREGARD_LOCK is set. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 4, 0, 0);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);
                IEM_MC_LOCAL(uint16_t,  uTmpReg);
                IEM_MC_ARG(uint16_t *,  pu16Mem,          0);
                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U16_RW(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 4, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);
                IEM_MC_LOCAL(uint32_t,  uTmpReg);
                IEM_MC_ARG(uint32_t *,  pu32Mem,          0);
                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U32_RW(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 4, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);
                IEM_MC_LOCAL(uint64_t,  uTmpReg);
                IEM_MC_ARG(uint64_t *,  pu64Mem,          0);
                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U64_RW(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5432
5433
/**
 * @opcode 0x88
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: plain fetch + store. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5473
5474
/**
 * @opcode 0x89
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: one fetch + store per operand size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5571
5572
/**
 * @opcode 0x8a
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register source: plain fetch + store. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5611
5612
/**
 * @opcode 0x8b
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register source: one fetch + store per operand size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5709
5710
5711/**
5712 * opcode 0x63
5713 * @todo Table fixme
5714 */
5715FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
5716{
5717 if (!IEM_IS_64BIT_CODE(pVCpu))
5718 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
5719 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
5720 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
5721 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
5722}
5723
5724
/**
 * @opcode 0x8c
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm); /* note: full 3-bit reg field, REX.R not applied */
    if (iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                /* Zero-extended to the full register width. */
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                /* Zero-extended to the full register width. */
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5803
5804
5805
5806
/**
 * @opcode 0x8d
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */

    /* LEA stores the calculated effective address itself (no memory access),
       truncated to the effective operand size for the 16/32-bit cases. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc); /* truncate to 16 bits */
            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc); /* truncate to 32 bits */
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5856
5857
/**
 * @opcode 0x8e
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    /** @todo r=bird: What does 8086 do here wrt CS? */
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     *
     * Note! Using IEMOP_MOV_SW_EV_REG_BODY here to specify different
     *       IEM_CIMPL_F_XXX values depending on the CPU mode and target
     *       register. This is a restriction of the current recompiler
     *       approach.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The flush mask passed to IEM_MC_CALL_CIMPL_2 covers the selector,
           base and limit shadow registers of the segment being loaded. */
#define IEMOP_MOV_SW_EV_REG_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 0, 0, a_fCImplFlags); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t,      u16Value,          1); \
            IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                  RT_BIT_64(kIemNativeGstReg_SegSelFirst   + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst  + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + iSegReg), \
                                iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        if (iSegReg == X86_SREG_SS)
        {
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_REG_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_REG_BODY
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#define IEMOP_MOV_SW_EV_MEM_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 1, 0, a_fCImplFlags); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t,      u16Value,          1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                  RT_BIT_64(kIemNativeGstReg_SegSelFirst   + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst  + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + iSegReg), \
                                iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        if (iSegReg == X86_SREG_SS)
        {
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_MEM_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_MEM_BODY
    }
}
5971
5972
/** Opcode 0x8f /0. - pop word/dword/qword from stack into r/m operand. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignoring it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code with the
       0x58..0x5f pop-register opcodes. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calculations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            /* The (cbPop << 8) third argument tells the effective address
               calculation to pretend rSP has already been popped (2/4/8 bytes). */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    /* Only commit the new rSP once both the pop and the store succeeded. */
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
6104
6105
6106/**
6107 * @opcode 0x8f
6108 */
6109FNIEMOP_DEF(iemOp_Grp1A__xop)
6110{
6111 /*
6112 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
6113 * three byte VEX prefix, except that the mmmmm field cannot have the values
6114 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
6115 */
6116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6117 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
6118 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
6119
6120 IEMOP_MNEMONIC(xop, "xop");
6121 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
6122 {
6123 /** @todo Test when exctly the XOP conformance checks kick in during
6124 * instruction decoding and fetching (using \#PF). */
6125 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
6126 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6127 if ( ( pVCpu->iem.s.fPrefixes
6128 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
6129 == 0)
6130 {
6131 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
6132 if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
6133 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
6134 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
6135 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
6136 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
6137 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
6138 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
6139 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
6140
6141 /** @todo XOP: Just use new tables and decoders. */
6142 switch (bRm & 0x1f)
6143 {
6144 case 8: /* xop opcode map 8. */
6145 IEMOP_BITCH_ABOUT_STUB();
6146 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6147
6148 case 9: /* xop opcode map 9. */
6149 IEMOP_BITCH_ABOUT_STUB();
6150 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6151
6152 case 10: /* xop opcode map 10. */
6153 IEMOP_BITCH_ABOUT_STUB();
6154 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6155
6156 default:
6157 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
6158 IEMOP_RAISE_INVALID_OPCODE_RET();
6159 }
6160 }
6161 else
6162 Log(("XOP: Invalid prefix mix!\n"));
6163 }
6164 else
6165 Log(("XOP: XOP support disabled!\n"));
6166 IEMOP_RAISE_INVALID_OPCODE_RET();
6167}
6168
6169
6170/**
6171 * Common 'xchg reg,rAX' helper.
6172 */
6173FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
6174{
6175 iReg |= pVCpu->iem.s.uRexB;
6176 switch (pVCpu->iem.s.enmEffOpSize)
6177 {
6178 case IEMMODE_16BIT:
6179 IEM_MC_BEGIN(0, 2, 0, 0);
6180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6181 IEM_MC_LOCAL(uint16_t, u16Tmp1);
6182 IEM_MC_LOCAL(uint16_t, u16Tmp2);
6183 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
6184 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
6185 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
6186 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
6187 IEM_MC_ADVANCE_RIP_AND_FINISH();
6188 IEM_MC_END();
6189 break;
6190
6191 case IEMMODE_32BIT:
6192 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
6193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6194 IEM_MC_LOCAL(uint32_t, u32Tmp1);
6195 IEM_MC_LOCAL(uint32_t, u32Tmp2);
6196 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
6197 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
6198 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
6199 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
6200 IEM_MC_ADVANCE_RIP_AND_FINISH();
6201 IEM_MC_END();
6202 break;
6203
6204 case IEMMODE_64BIT:
6205 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6207 IEM_MC_LOCAL(uint64_t, u64Tmp1);
6208 IEM_MC_LOCAL(uint64_t, u64Tmp2);
6209 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
6210 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
6211 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
6212 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
6213 IEM_MC_ADVANCE_RIP_AND_FINISH();
6214 IEM_MC_END();
6215 break;
6216
6217 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6218 }
6219}
6220
6221
6222/**
6223 * @opcode 0x90
6224 */
6225FNIEMOP_DEF(iemOp_nop)
6226{
6227 /* R8/R8D and RAX/EAX can be exchanged. */
6228 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
6229 {
6230 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
6231 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
6232 }
6233
6234 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
6235 {
6236 IEMOP_MNEMONIC(pause, "pause");
6237 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
6238 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
6239 if (!IEM_IS_IN_GUEST(pVCpu))
6240 { /* probable */ }
6241#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6242 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
6243 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmx_pause);
6244#endif
6245#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
6246 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
6247 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_svm_pause);
6248#endif
6249 }
6250 else
6251 IEMOP_MNEMONIC(nop, "nop");
6252 /** @todo testcase: lock nop; lock pause */
6253 IEM_MC_BEGIN(0, 0, 0, 0);
6254 IEMOP_HLP_DONE_DECODING();
6255 IEM_MC_ADVANCE_RIP_AND_FINISH();
6256 IEM_MC_END();
6257}
6258
6259
6260/**
6261 * @opcode 0x91
6262 */
6263FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
6264{
6265 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
6266 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
6267}
6268
6269
6270/**
6271 * @opcode 0x92
6272 */
6273FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
6274{
6275 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
6276 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
6277}
6278
6279
6280/**
6281 * @opcode 0x93
6282 */
6283FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
6284{
6285 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
6286 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
6287}
6288
6289
6290/**
6291 * @opcode 0x94
6292 */
6293FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6294{
6295 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6296 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6297}
6298
6299
6300/**
6301 * @opcode 0x95
6302 */
6303FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
6304{
6305 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
6306 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
6307}
6308
6309
6310/**
6311 * @opcode 0x96
6312 */
6313FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
6314{
6315 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
6316 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
6317}
6318
6319
6320/**
6321 * @opcode 0x97
6322 */
6323FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
6324{
6325 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
6326 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
6327}
6328
6329
6330/**
6331 * @opcode 0x98
6332 */
6333FNIEMOP_DEF(iemOp_cbw)
6334{
6335 switch (pVCpu->iem.s.enmEffOpSize)
6336 {
6337 case IEMMODE_16BIT:
6338 IEMOP_MNEMONIC(cbw, "cbw");
6339 IEM_MC_BEGIN(0, 1, 0, 0);
6340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6341 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
6342 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
6343 } IEM_MC_ELSE() {
6344 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
6345 } IEM_MC_ENDIF();
6346 IEM_MC_ADVANCE_RIP_AND_FINISH();
6347 IEM_MC_END();
6348 break;
6349
6350 case IEMMODE_32BIT:
6351 IEMOP_MNEMONIC(cwde, "cwde");
6352 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6354 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6355 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
6356 } IEM_MC_ELSE() {
6357 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
6358 } IEM_MC_ENDIF();
6359 IEM_MC_ADVANCE_RIP_AND_FINISH();
6360 IEM_MC_END();
6361 break;
6362
6363 case IEMMODE_64BIT:
6364 IEMOP_MNEMONIC(cdqe, "cdqe");
6365 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6367 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6368 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
6369 } IEM_MC_ELSE() {
6370 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
6371 } IEM_MC_ENDIF();
6372 IEM_MC_ADVANCE_RIP_AND_FINISH();
6373 IEM_MC_END();
6374 break;
6375
6376 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6377 }
6378}
6379
6380
6381/**
6382 * @opcode 0x99
6383 */
6384FNIEMOP_DEF(iemOp_cwd)
6385{
6386 switch (pVCpu->iem.s.enmEffOpSize)
6387 {
6388 case IEMMODE_16BIT:
6389 IEMOP_MNEMONIC(cwd, "cwd");
6390 IEM_MC_BEGIN(0, 1, 0, 0);
6391 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6392 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6393 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
6394 } IEM_MC_ELSE() {
6395 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
6396 } IEM_MC_ENDIF();
6397 IEM_MC_ADVANCE_RIP_AND_FINISH();
6398 IEM_MC_END();
6399 break;
6400
6401 case IEMMODE_32BIT:
6402 IEMOP_MNEMONIC(cdq, "cdq");
6403 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6405 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6406 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
6407 } IEM_MC_ELSE() {
6408 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
6409 } IEM_MC_ENDIF();
6410 IEM_MC_ADVANCE_RIP_AND_FINISH();
6411 IEM_MC_END();
6412 break;
6413
6414 case IEMMODE_64BIT:
6415 IEMOP_MNEMONIC(cqo, "cqo");
6416 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6418 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
6419 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
6420 } IEM_MC_ELSE() {
6421 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
6422 } IEM_MC_ENDIF();
6423 IEM_MC_ADVANCE_RIP_AND_FINISH();
6424 IEM_MC_END();
6425 break;
6426
6427 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6428 }
6429}
6430
6431
6432/**
6433 * @opcode 0x9a
6434 */
6435FNIEMOP_DEF(iemOp_call_Ap)
6436{
6437 IEMOP_MNEMONIC(call_Ap, "call Ap");
6438 IEMOP_HLP_NO_64BIT();
6439
6440 /* Decode the far pointer address and pass it on to the far call C implementation. */
6441 uint32_t off32Seg;
6442 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
6443 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
6444 else
6445 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
6446 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
6447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6448 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
6449 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
6450 iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
6451 /** @todo make task-switches, ring-switches, ++ return non-zero status */
6452}
6453
6454
/** Opcode 0x9b. (aka fwait)
 * Checks for pending x87 exceptions (and CR0.TS/EM wait conditions) and
 * otherwise does nothing. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6466
6467
6468/**
6469 * @opcode 0x9c
6470 */
6471FNIEMOP_DEF(iemOp_pushf_Fv)
6472{
6473 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
6474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6475 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6476 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
6477 iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
6478}
6479
6480
6481/**
6482 * @opcode 0x9d
6483 */
6484FNIEMOP_DEF(iemOp_popf_Fv)
6485{
6486 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
6487 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6488 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6489 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
6490 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
6491 iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
6492}
6493
6494
6495/**
6496 * @opcode 0x9e
6497 */
6498FNIEMOP_DEF(iemOp_sahf)
6499{
6500 IEMOP_MNEMONIC(sahf, "sahf");
6501 if ( IEM_IS_64BIT_CODE(pVCpu)
6502 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6503 IEMOP_RAISE_INVALID_OPCODE_RET();
6504 IEM_MC_BEGIN(0, 2, 0, 0);
6505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6506 IEM_MC_LOCAL(uint32_t, u32Flags);
6507 IEM_MC_LOCAL(uint32_t, EFlags);
6508 IEM_MC_FETCH_EFLAGS(EFlags);
6509 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
6510 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6511 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
6512 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
6513 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
6514 IEM_MC_COMMIT_EFLAGS(EFlags);
6515 IEM_MC_ADVANCE_RIP_AND_FINISH();
6516 IEM_MC_END();
6517}
6518
6519
6520/**
6521 * @opcode 0x9f
6522 */
6523FNIEMOP_DEF(iemOp_lahf)
6524{
6525 IEMOP_MNEMONIC(lahf, "lahf");
6526 if ( IEM_IS_64BIT_CODE(pVCpu)
6527 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6528 IEMOP_RAISE_INVALID_OPCODE_RET();
6529 IEM_MC_BEGIN(0, 1, 0, 0);
6530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6531 IEM_MC_LOCAL(uint8_t, u8Flags);
6532 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
6533 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
6534 IEM_MC_ADVANCE_RIP_AND_FINISH();
6535 IEM_MC_END();
6536}
6537
6538
6539/**
6540 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
6541 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
6542 * Will return/throw on failures.
6543 * @param a_GCPtrMemOff The variable to store the offset in.
6544 */
6545#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
6546 do \
6547 { \
6548 switch (pVCpu->iem.s.enmEffAddrMode) \
6549 { \
6550 case IEMMODE_16BIT: \
6551 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
6552 break; \
6553 case IEMMODE_32BIT: \
6554 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
6555 break; \
6556 case IEMMODE_64BIT: \
6557 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
6558 break; \
6559 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
6560 } \
6561 } while (0)
6562
6563/**
6564 * @opcode 0xa0
6565 */
6566FNIEMOP_DEF(iemOp_mov_AL_Ob)
6567{
6568 /*
6569 * Get the offset.
6570 */
6571 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
6572 RTGCPTR GCPtrMemOff;
6573 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6574
6575 /*
6576 * Fetch AL.
6577 */
6578 IEM_MC_BEGIN(0, 1, 0, 0);
6579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6580 IEM_MC_LOCAL(uint8_t, u8Tmp);
6581 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6582 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
6583 IEM_MC_ADVANCE_RIP_AND_FINISH();
6584 IEM_MC_END();
6585}
6586
6587
6588/**
6589 * @opcode 0xa1
6590 */
6591FNIEMOP_DEF(iemOp_mov_rAX_Ov)
6592{
6593 /*
6594 * Get the offset.
6595 */
6596 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
6597 RTGCPTR GCPtrMemOff;
6598 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6599
6600 /*
6601 * Fetch rAX.
6602 */
6603 switch (pVCpu->iem.s.enmEffOpSize)
6604 {
6605 case IEMMODE_16BIT:
6606 IEM_MC_BEGIN(0, 1, 0, 0);
6607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6608 IEM_MC_LOCAL(uint16_t, u16Tmp);
6609 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6610 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
6611 IEM_MC_ADVANCE_RIP_AND_FINISH();
6612 IEM_MC_END();
6613 break;
6614
6615 case IEMMODE_32BIT:
6616 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6618 IEM_MC_LOCAL(uint32_t, u32Tmp);
6619 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6620 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
6621 IEM_MC_ADVANCE_RIP_AND_FINISH();
6622 IEM_MC_END();
6623 break;
6624
6625 case IEMMODE_64BIT:
6626 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6628 IEM_MC_LOCAL(uint64_t, u64Tmp);
6629 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6630 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
6631 IEM_MC_ADVANCE_RIP_AND_FINISH();
6632 IEM_MC_END();
6633 break;
6634
6635 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6636 }
6637}
6638
6639
6640/**
6641 * @opcode 0xa2
6642 */
6643FNIEMOP_DEF(iemOp_mov_Ob_AL)
6644{
6645 /*
6646 * Get the offset.
6647 */
6648 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
6649 RTGCPTR GCPtrMemOff;
6650 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6651
6652 /*
6653 * Store AL.
6654 */
6655 IEM_MC_BEGIN(0, 1, 0, 0);
6656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6657 IEM_MC_LOCAL(uint8_t, u8Tmp);
6658 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
6659 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
6660 IEM_MC_ADVANCE_RIP_AND_FINISH();
6661 IEM_MC_END();
6662}
6663
6664
6665/**
6666 * @opcode 0xa3
6667 */
6668FNIEMOP_DEF(iemOp_mov_Ov_rAX)
6669{
6670 /*
6671 * Get the offset.
6672 */
6673 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
6674 RTGCPTR GCPtrMemOff;
6675 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6676
6677 /*
6678 * Store rAX.
6679 */
6680 switch (pVCpu->iem.s.enmEffOpSize)
6681 {
6682 case IEMMODE_16BIT:
6683 IEM_MC_BEGIN(0, 1, 0, 0);
6684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6685 IEM_MC_LOCAL(uint16_t, u16Tmp);
6686 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
6687 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
6688 IEM_MC_ADVANCE_RIP_AND_FINISH();
6689 IEM_MC_END();
6690 break;
6691
6692 case IEMMODE_32BIT:
6693 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6695 IEM_MC_LOCAL(uint32_t, u32Tmp);
6696 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
6697 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
6698 IEM_MC_ADVANCE_RIP_AND_FINISH();
6699 IEM_MC_END();
6700 break;
6701
6702 case IEMMODE_64BIT:
6703 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6705 IEM_MC_LOCAL(uint64_t, u64Tmp);
6706 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
6707 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
6708 IEM_MC_ADVANCE_RIP_AND_FINISH();
6709 IEM_MC_END();
6710 break;
6711
6712 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6713 }
6714}
6715
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one non-rep movs case: loads ValBits from [iEffSeg:rSI], stores to
 * [ES:rDI], then advances (or retreats, when EFLAGS.DF is set) rSI and rDI
 * by the operand size.  Addresses are zero-extended from AddrBits to 64 bits.
 *
 * @param ValBits    Operand size in bits (8/16/32/64).
 * @param AddrBits   Effective address size in bits (16/32/64).
 * @param a_fMcFlags IEM_MC_F_XXX flags for IEM_MC_BEGIN.
 */
#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6735
6736/**
6737 * @opcode 0xa4
6738 */
6739FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
6740{
6741 /*
6742 * Use the C implementation if a repeat prefix is encountered.
6743 */
6744 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6745 {
6746 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
6747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6748 switch (pVCpu->iem.s.enmEffAddrMode)
6749 {
6750 case IEMMODE_16BIT:
6751 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6752 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6753 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6754 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6755 iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
6756 case IEMMODE_32BIT:
6757 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6758 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6759 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6760 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6761 iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
6762 case IEMMODE_64BIT:
6763 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6764 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6765 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6766 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6767 iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
6768 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6769 }
6770 }
6771
6772 /*
6773 * Sharing case implementation with movs[wdq] below.
6774 */
6775 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
6776 switch (pVCpu->iem.s.enmEffAddrMode)
6777 {
6778 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
6779 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
6780 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
6781 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6782 }
6783}
6784
6785
6786/**
6787 * @opcode 0xa5
6788 */
6789FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
6790{
6791
6792 /*
6793 * Use the C implementation if a repeat prefix is encountered.
6794 */
6795 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6796 {
6797 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
6798 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6799 switch (pVCpu->iem.s.enmEffOpSize)
6800 {
6801 case IEMMODE_16BIT:
6802 switch (pVCpu->iem.s.enmEffAddrMode)
6803 {
6804 case IEMMODE_16BIT:
6805 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6806 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6807 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6808 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6809 iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
6810 case IEMMODE_32BIT:
6811 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6812 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6813 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6814 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6815 iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
6816 case IEMMODE_64BIT:
6817 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6818 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6819 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6820 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6821 iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
6822 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6823 }
6824 break;
6825 case IEMMODE_32BIT:
6826 switch (pVCpu->iem.s.enmEffAddrMode)
6827 {
6828 case IEMMODE_16BIT:
6829 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6830 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6831 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6832 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6833 iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
6834 case IEMMODE_32BIT:
6835 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6836 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6837 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6838 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6839 iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
6840 case IEMMODE_64BIT:
6841 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6842 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6843 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6844 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6845 iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
6846 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6847 }
6848 case IEMMODE_64BIT:
6849 switch (pVCpu->iem.s.enmEffAddrMode)
6850 {
6851 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
6852 case IEMMODE_32BIT:
6853 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6854 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6855 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6856 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6857 iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
6858 case IEMMODE_64BIT:
6859 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6860 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6861 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6862 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6863 iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
6864 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6865 }
6866 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6867 }
6868 }
6869
6870 /*
6871 * Annoying double switch here.
6872 * Using ugly macro for implementing the cases, sharing it with movsb.
6873 */
6874 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
6875 switch (pVCpu->iem.s.enmEffOpSize)
6876 {
6877 case IEMMODE_16BIT:
6878 switch (pVCpu->iem.s.enmEffAddrMode)
6879 {
6880 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
6881 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
6882 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
6883 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6884 }
6885 break;
6886
6887 case IEMMODE_32BIT:
6888 switch (pVCpu->iem.s.enmEffAddrMode)
6889 {
6890 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
6891 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
6892 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
6893 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6894 }
6895 break;
6896
6897 case IEMMODE_64BIT:
6898 switch (pVCpu->iem.s.enmEffAddrMode)
6899 {
6900 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6901 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
6902 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
6903 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6904 }
6905 break;
6906 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6907 }
6908}
6909
6910#undef IEM_MOVS_CASE
6911
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one non-rep cmps case: loads ValBits from [iEffSeg:rSI] and
 * [ES:rDI], compares them via iemAImpl_cmp_uNN (updating EFLAGS), then
 * advances (or retreats, when EFLAGS.DF is set) rSI and rDI by the
 * operand size.  Addresses are zero-extended from AddrBits to 64 bits.
 *
 * @param ValBits    Operand size in bits (8/16/32/64).
 * @param AddrBits   Effective address size in bits (16/32/64).
 * @param a_fMcFlags IEM_MC_F_XXX flags for IEM_MC_BEGIN.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 3, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
    IEM_MC_REF_LOCAL(puValue1, uValue1); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6939
6940/**
6941 * @opcode 0xa6
6942 */
6943FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
6944{
6945
6946 /*
6947 * Use the C implementation if a repeat prefix is encountered.
6948 */
6949 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6950 {
6951 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
6952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6953 switch (pVCpu->iem.s.enmEffAddrMode)
6954 {
6955 case IEMMODE_16BIT:
6956 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6957 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6958 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6959 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6960 iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6961 case IEMMODE_32BIT:
6962 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6963 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6964 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6965 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6966 iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6967 case IEMMODE_64BIT:
6968 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6969 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6970 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6971 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6972 iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6973 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6974 }
6975 }
6976 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6977 {
6978 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
6979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6980 switch (pVCpu->iem.s.enmEffAddrMode)
6981 {
6982 case IEMMODE_16BIT:
6983 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6984 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6985 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6986 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6987 iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6988 case IEMMODE_32BIT:
6989 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6990 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6991 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6992 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6993 iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6994 case IEMMODE_64BIT:
6995 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6996 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6997 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6998 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6999 iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
7000 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7001 }
7002 }
7003
7004 /*
7005 * Sharing case implementation with cmps[wdq] below.
7006 */
7007 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
7008 switch (pVCpu->iem.s.enmEffAddrMode)
7009 {
7010 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7011 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7012 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
7013 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7014 }
7015}
7016
7017
7018/**
7019 * @opcode 0xa7
7020 */
7021FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
7022{
7023 /*
7024 * Use the C implementation if a repeat prefix is encountered.
7025 */
7026 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7027 {
7028 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
7029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7030 switch (pVCpu->iem.s.enmEffOpSize)
7031 {
7032 case IEMMODE_16BIT:
7033 switch (pVCpu->iem.s.enmEffAddrMode)
7034 {
7035 case IEMMODE_16BIT:
7036 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7037 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7038 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7039 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7040 iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7041 case IEMMODE_32BIT:
7042 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7043 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7044 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7045 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7046 iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7047 case IEMMODE_64BIT:
7048 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7049 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7050 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7051 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7052 iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7053 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7054 }
7055 break;
7056 case IEMMODE_32BIT:
7057 switch (pVCpu->iem.s.enmEffAddrMode)
7058 {
7059 case IEMMODE_16BIT:
7060 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7061 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7062 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7063 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7064 iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7065 case IEMMODE_32BIT:
7066 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7067 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7068 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7069 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7070 iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7071 case IEMMODE_64BIT:
7072 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7073 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7074 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7075 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7076 iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7077 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7078 }
7079 case IEMMODE_64BIT:
7080 switch (pVCpu->iem.s.enmEffAddrMode)
7081 {
7082 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
7083 case IEMMODE_32BIT:
7084 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7085 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7086 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7087 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7088 iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7089 case IEMMODE_64BIT:
7090 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7091 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7092 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7093 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7094 iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7095 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7096 }
7097 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7098 }
7099 }
7100
7101 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7102 {
7103 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
7104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7105 switch (pVCpu->iem.s.enmEffOpSize)
7106 {
7107 case IEMMODE_16BIT:
7108 switch (pVCpu->iem.s.enmEffAddrMode)
7109 {
7110 case IEMMODE_16BIT:
7111 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7112 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7113 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7114 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7115 iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7116 case IEMMODE_32BIT:
7117 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7118 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7119 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7120 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7121 iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7122 case IEMMODE_64BIT:
7123 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7124 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7125 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7126 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7127 iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7128 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7129 }
7130 break;
7131 case IEMMODE_32BIT:
7132 switch (pVCpu->iem.s.enmEffAddrMode)
7133 {
7134 case IEMMODE_16BIT:
7135 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7136 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7137 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7138 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7139 iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7140 case IEMMODE_32BIT:
7141 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7142 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7143 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7144 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7145 iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7146 case IEMMODE_64BIT:
7147 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7148 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7149 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7150 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7151 iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7152 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7153 }
7154 case IEMMODE_64BIT:
7155 switch (pVCpu->iem.s.enmEffAddrMode)
7156 {
7157 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
7158 case IEMMODE_32BIT:
7159 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7160 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7161 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7162 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7163 iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7164 case IEMMODE_64BIT:
7165 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7166 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7167 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7168 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7169 iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7170 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7171 }
7172 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7173 }
7174 }
7175
7176 /*
7177 * Annoying double switch here.
7178 * Using ugly macro for implementing the cases, sharing it with cmpsb.
7179 */
7180 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
7181 switch (pVCpu->iem.s.enmEffOpSize)
7182 {
7183 case IEMMODE_16BIT:
7184 switch (pVCpu->iem.s.enmEffAddrMode)
7185 {
7186 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7187 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7188 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
7189 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7190 }
7191 break;
7192
7193 case IEMMODE_32BIT:
7194 switch (pVCpu->iem.s.enmEffAddrMode)
7195 {
7196 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7197 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7198 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
7199 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7200 }
7201 break;
7202
7203 case IEMMODE_64BIT:
7204 switch (pVCpu->iem.s.enmEffAddrMode)
7205 {
7206 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7207 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break;
7208 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
7209 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7210 }
7211 break;
7212 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7213 }
7214}
7215
7216#undef IEM_CMPS_CASE
7217
7218/**
7219 * @opcode 0xa8
7220 */
7221FNIEMOP_DEF(iemOp_test_AL_Ib)
7222{
7223 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
7224 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7225 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
7226}
7227
7228
7229/**
7230 * @opcode 0xa9
7231 */
7232FNIEMOP_DEF(iemOp_test_eAX_Iz)
7233{
7234 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
7235 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7236 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
7237}
7238
7239
/**
 * Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Stores xAX (at the given value width) to ES:[xDI], then steps xDI by the
 * operand size - backwards when EFL.DF is set, forwards otherwise.
 */
#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \

7256
7257/**
7258 * @opcode 0xaa
7259 */
7260FNIEMOP_DEF(iemOp_stosb_Yb_AL)
7261{
7262 /*
7263 * Use the C implementation if a repeat prefix is encountered.
7264 */
7265 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7266 {
7267 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
7268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7269 switch (pVCpu->iem.s.enmEffAddrMode)
7270 {
7271 case IEMMODE_16BIT:
7272 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7273 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7274 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7275 iemCImpl_stos_al_m16);
7276 case IEMMODE_32BIT:
7277 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7278 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7279 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7280 iemCImpl_stos_al_m32);
7281 case IEMMODE_64BIT:
7282 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7283 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7284 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7285 iemCImpl_stos_al_m64);
7286 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7287 }
7288 }
7289
7290 /*
7291 * Sharing case implementation with stos[wdq] below.
7292 */
7293 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
7294 switch (pVCpu->iem.s.enmEffAddrMode)
7295 {
7296 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7297 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7298 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
7299 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7300 }
7301}
7302
7303
7304/**
7305 * @opcode 0xab
7306 */
7307FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
7308{
7309 /*
7310 * Use the C implementation if a repeat prefix is encountered.
7311 */
7312 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7313 {
7314 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
7315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7316 switch (pVCpu->iem.s.enmEffOpSize)
7317 {
7318 case IEMMODE_16BIT:
7319 switch (pVCpu->iem.s.enmEffAddrMode)
7320 {
7321 case IEMMODE_16BIT:
7322 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7323 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7324 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7325 iemCImpl_stos_ax_m16);
7326 case IEMMODE_32BIT:
7327 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7328 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7329 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7330 iemCImpl_stos_ax_m32);
7331 case IEMMODE_64BIT:
7332 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7333 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7334 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7335 iemCImpl_stos_ax_m64);
7336 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7337 }
7338 break;
7339 case IEMMODE_32BIT:
7340 switch (pVCpu->iem.s.enmEffAddrMode)
7341 {
7342 case IEMMODE_16BIT:
7343 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7344 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7345 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7346 iemCImpl_stos_eax_m16);
7347 case IEMMODE_32BIT:
7348 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7349 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7350 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7351 iemCImpl_stos_eax_m32);
7352 case IEMMODE_64BIT:
7353 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7354 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7355 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7356 iemCImpl_stos_eax_m64);
7357 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7358 }
7359 case IEMMODE_64BIT:
7360 switch (pVCpu->iem.s.enmEffAddrMode)
7361 {
7362 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7363 case IEMMODE_32BIT:
7364 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7365 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7366 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7367 iemCImpl_stos_rax_m32);
7368 case IEMMODE_64BIT:
7369 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7370 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7371 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7372 iemCImpl_stos_rax_m64);
7373 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7374 }
7375 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7376 }
7377 }
7378
7379 /*
7380 * Annoying double switch here.
7381 * Using ugly macro for implementing the cases, sharing it with stosb.
7382 */
7383 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7384 switch (pVCpu->iem.s.enmEffOpSize)
7385 {
7386 case IEMMODE_16BIT:
7387 switch (pVCpu->iem.s.enmEffAddrMode)
7388 {
7389 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7390 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7391 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
7392 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7393 }
7394 break;
7395
7396 case IEMMODE_32BIT:
7397 switch (pVCpu->iem.s.enmEffAddrMode)
7398 {
7399 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7400 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7401 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
7402 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7403 }
7404 break;
7405
7406 case IEMMODE_64BIT:
7407 switch (pVCpu->iem.s.enmEffAddrMode)
7408 {
7409 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7410 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
7411 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
7412 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7413 }
7414 break;
7415 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7416 }
7417}
7418
7419#undef IEM_STOS_CASE
7420
/**
 * Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Loads the value at iEffSeg:[xSI] into xAX (at the given value width), then
 * steps xSI by the operand size - backwards when EFL.DF is set, forwards
 * otherwise.
 */
#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \

7437
7438/**
7439 * @opcode 0xac
7440 */
7441FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
7442{
7443 /*
7444 * Use the C implementation if a repeat prefix is encountered.
7445 */
7446 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7447 {
7448 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
7449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7450 switch (pVCpu->iem.s.enmEffAddrMode)
7451 {
7452 case IEMMODE_16BIT:
7453 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7454 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7455 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7456 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7457 iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
7458 case IEMMODE_32BIT:
7459 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7460 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7461 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7462 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7463 iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
7464 case IEMMODE_64BIT:
7465 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7466 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7467 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7468 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7469 iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
7470 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7471 }
7472 }
7473
7474 /*
7475 * Sharing case implementation with stos[wdq] below.
7476 */
7477 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
7478 switch (pVCpu->iem.s.enmEffAddrMode)
7479 {
7480 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7481 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7482 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
7483 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7484 }
7485}
7486
7487
7488/**
7489 * @opcode 0xad
7490 */
7491FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
7492{
7493 /*
7494 * Use the C implementation if a repeat prefix is encountered.
7495 */
7496 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7497 {
7498 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
7499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7500 switch (pVCpu->iem.s.enmEffOpSize)
7501 {
7502 case IEMMODE_16BIT:
7503 switch (pVCpu->iem.s.enmEffAddrMode)
7504 {
7505 case IEMMODE_16BIT:
7506 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7507 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7508 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7509 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7510 iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
7511 case IEMMODE_32BIT:
7512 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7513 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7514 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7515 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7516 iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
7517 case IEMMODE_64BIT:
7518 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7519 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7520 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7521 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7522 iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
7523 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7524 }
7525 break;
7526 case IEMMODE_32BIT:
7527 switch (pVCpu->iem.s.enmEffAddrMode)
7528 {
7529 case IEMMODE_16BIT:
7530 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7531 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7532 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7533 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7534 iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
7535 case IEMMODE_32BIT:
7536 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7537 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7538 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7539 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7540 iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
7541 case IEMMODE_64BIT:
7542 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7543 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7544 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7545 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7546 iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
7547 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7548 }
7549 case IEMMODE_64BIT:
7550 switch (pVCpu->iem.s.enmEffAddrMode)
7551 {
7552 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
7553 case IEMMODE_32BIT:
7554 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7555 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7556 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7557 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7558 iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
7559 case IEMMODE_64BIT:
7560 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7561 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7562 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7563 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7564 iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
7565 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7566 }
7567 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7568 }
7569 }
7570
7571 /*
7572 * Annoying double switch here.
7573 * Using ugly macro for implementing the cases, sharing it with lodsb.
7574 */
7575 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
7576 switch (pVCpu->iem.s.enmEffOpSize)
7577 {
7578 case IEMMODE_16BIT:
7579 switch (pVCpu->iem.s.enmEffAddrMode)
7580 {
7581 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7582 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7583 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
7584 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7585 }
7586 break;
7587
7588 case IEMMODE_32BIT:
7589 switch (pVCpu->iem.s.enmEffAddrMode)
7590 {
7591 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7592 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7593 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
7594 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7595 }
7596 break;
7597
7598 case IEMMODE_64BIT:
7599 switch (pVCpu->iem.s.enmEffAddrMode)
7600 {
7601 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7602 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
7603 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
7604 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7605 }
7606 break;
7607 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7608 }
7609}
7610
7611#undef IEM_LODS_CASE
7612
/**
 * Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Compares xAX (at the given value width) against the value at ES:[xDI] via
 * iemAImpl_cmp_uNN (updating EFLAGS), then steps xDI by the operand size -
 * backwards when EFL.DF is set, forwards otherwise.  Note: only xDI moves;
 * xSI is not involved in SCAS.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
7635
7636/**
7637 * @opcode 0xae
7638 */
7639FNIEMOP_DEF(iemOp_scasb_AL_Xb)
7640{
7641 /*
7642 * Use the C implementation if a repeat prefix is encountered.
7643 */
7644 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7645 {
7646 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
7647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7648 switch (pVCpu->iem.s.enmEffAddrMode)
7649 {
7650 case IEMMODE_16BIT:
7651 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7652 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7653 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7654 iemCImpl_repe_scas_al_m16);
7655 case IEMMODE_32BIT:
7656 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7657 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7658 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7659 iemCImpl_repe_scas_al_m32);
7660 case IEMMODE_64BIT:
7661 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7662 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7663 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7664 iemCImpl_repe_scas_al_m64);
7665 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7666 }
7667 }
7668 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7669 {
7670 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
7671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7672 switch (pVCpu->iem.s.enmEffAddrMode)
7673 {
7674 case IEMMODE_16BIT:
7675 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7676 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7677 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7678 iemCImpl_repne_scas_al_m16);
7679 case IEMMODE_32BIT:
7680 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7681 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7682 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7683 iemCImpl_repne_scas_al_m32);
7684 case IEMMODE_64BIT:
7685 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7686 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7687 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7688 iemCImpl_repne_scas_al_m64);
7689 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7690 }
7691 }
7692
7693 /*
7694 * Sharing case implementation with stos[wdq] below.
7695 */
7696 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
7697 switch (pVCpu->iem.s.enmEffAddrMode)
7698 {
7699 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7700 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7701 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
7702 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7703 }
7704}
7705
7706
7707/**
7708 * @opcode 0xaf
7709 */
7710FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
7711{
7712 /*
7713 * Use the C implementation if a repeat prefix is encountered.
7714 */
7715 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7716 {
7717 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
7718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7719 switch (pVCpu->iem.s.enmEffOpSize)
7720 {
7721 case IEMMODE_16BIT:
7722 switch (pVCpu->iem.s.enmEffAddrMode)
7723 {
7724 case IEMMODE_16BIT:
7725 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7726 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7727 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7728 iemCImpl_repe_scas_ax_m16);
7729 case IEMMODE_32BIT:
7730 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7731 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7732 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7733 iemCImpl_repe_scas_ax_m32);
7734 case IEMMODE_64BIT:
7735 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7736 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7737 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7738 iemCImpl_repe_scas_ax_m64);
7739 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7740 }
7741 break;
7742 case IEMMODE_32BIT:
7743 switch (pVCpu->iem.s.enmEffAddrMode)
7744 {
7745 case IEMMODE_16BIT:
7746 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7747 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7748 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7749 iemCImpl_repe_scas_eax_m16);
7750 case IEMMODE_32BIT:
7751 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7752 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7753 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7754 iemCImpl_repe_scas_eax_m32);
7755 case IEMMODE_64BIT:
7756 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7757 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7758 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7759 iemCImpl_repe_scas_eax_m64);
7760 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7761 }
7762 case IEMMODE_64BIT:
7763 switch (pVCpu->iem.s.enmEffAddrMode)
7764 {
7765 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
7766 case IEMMODE_32BIT:
7767 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7768 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7769 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7770 iemCImpl_repe_scas_rax_m32);
7771 case IEMMODE_64BIT:
7772 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7773 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7774 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7775 iemCImpl_repe_scas_rax_m64);
7776 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7777 }
7778 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7779 }
7780 }
7781 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7782 {
7783 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
7784 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7785 switch (pVCpu->iem.s.enmEffOpSize)
7786 {
7787 case IEMMODE_16BIT:
7788 switch (pVCpu->iem.s.enmEffAddrMode)
7789 {
7790 case IEMMODE_16BIT:
7791 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7792 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7793 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7794 iemCImpl_repne_scas_ax_m16);
7795 case IEMMODE_32BIT:
7796 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7797 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7798 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7799 iemCImpl_repne_scas_ax_m32);
7800 case IEMMODE_64BIT:
7801 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7802 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7803 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7804 iemCImpl_repne_scas_ax_m64);
7805 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7806 }
7807 break;
7808 case IEMMODE_32BIT:
7809 switch (pVCpu->iem.s.enmEffAddrMode)
7810 {
7811 case IEMMODE_16BIT:
7812 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7813 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7814 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7815 iemCImpl_repne_scas_eax_m16);
7816 case IEMMODE_32BIT:
7817 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7818 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7819 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7820 iemCImpl_repne_scas_eax_m32);
7821 case IEMMODE_64BIT:
7822 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7823 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7824 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7825 iemCImpl_repne_scas_eax_m64);
7826 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7827 }
7828 case IEMMODE_64BIT:
7829 switch (pVCpu->iem.s.enmEffAddrMode)
7830 {
7831 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
7832 case IEMMODE_32BIT:
7833 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7834 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7835 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7836 iemCImpl_repne_scas_rax_m32);
7837 case IEMMODE_64BIT:
7838 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7839 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7840 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7841 iemCImpl_repne_scas_rax_m64);
7842 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7843 }
7844 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7845 }
7846 }
7847
7848 /*
7849 * Annoying double switch here.
7850 * Using ugly macro for implementing the cases, sharing it with scasb.
7851 */
7852 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
7853 switch (pVCpu->iem.s.enmEffOpSize)
7854 {
7855 case IEMMODE_16BIT:
7856 switch (pVCpu->iem.s.enmEffAddrMode)
7857 {
7858 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7859 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7860 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
7861 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7862 }
7863 break;
7864
7865 case IEMMODE_32BIT:
7866 switch (pVCpu->iem.s.enmEffAddrMode)
7867 {
7868 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7869 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7870 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
7871 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7872 }
7873 break;
7874
7875 case IEMMODE_64BIT:
7876 switch (pVCpu->iem.s.enmEffAddrMode)
7877 {
7878 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7879 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
7880 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
7881 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7882 }
7883 break;
7884 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7885 }
7886}
7887
7888#undef IEM_SCAS_CASE
7889
7890/**
7891 * Common 'mov r8, imm8' helper.
7892 */
7893FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
7894{
7895 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7896 IEM_MC_BEGIN(0, 0, 0, 0);
7897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7898 IEM_MC_STORE_GREG_U8_CONST(iFixedReg, u8Imm);
7899 IEM_MC_ADVANCE_RIP_AND_FINISH();
7900 IEM_MC_END();
7901}
7902
7903
7904/**
7905 * @opcode 0xb0
7906 */
7907FNIEMOP_DEF(iemOp_mov_AL_Ib)
7908{
7909 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
7910 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7911}
7912
7913
7914/**
7915 * @opcode 0xb1
7916 */
7917FNIEMOP_DEF(iemOp_CL_Ib)
7918{
7919 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
7920 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7921}
7922
7923
7924/**
7925 * @opcode 0xb2
7926 */
7927FNIEMOP_DEF(iemOp_DL_Ib)
7928{
7929 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
7930 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7931}
7932
7933
7934/**
7935 * @opcode 0xb3
7936 */
7937FNIEMOP_DEF(iemOp_BL_Ib)
7938{
7939 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
7940 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7941}
7942
7943
7944/**
7945 * @opcode 0xb4
7946 */
7947FNIEMOP_DEF(iemOp_mov_AH_Ib)
7948{
7949 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
7950 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7951}
7952
7953
7954/**
7955 * @opcode 0xb5
7956 */
7957FNIEMOP_DEF(iemOp_CH_Ib)
7958{
7959 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
7960 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7961}
7962
7963
7964/**
7965 * @opcode 0xb6
7966 */
7967FNIEMOP_DEF(iemOp_DH_Ib)
7968{
7969 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
7970 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7971}
7972
7973
7974/**
7975 * @opcode 0xb7
7976 */
7977FNIEMOP_DEF(iemOp_BH_Ib)
7978{
7979 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
7980 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7981}
7982
7983
7984/**
7985 * Common 'mov regX,immX' helper.
7986 */
7987FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
7988{
7989 switch (pVCpu->iem.s.enmEffOpSize)
7990 {
7991 case IEMMODE_16BIT:
7992 IEM_MC_BEGIN(0, 0, 0, 0);
7993 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7995 IEM_MC_STORE_GREG_U16_CONST(iFixedReg, u16Imm);
7996 IEM_MC_ADVANCE_RIP_AND_FINISH();
7997 IEM_MC_END();
7998 break;
7999
8000 case IEMMODE_32BIT:
8001 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8002 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8004 IEM_MC_STORE_GREG_U32_CONST(iFixedReg, u32Imm);
8005 IEM_MC_ADVANCE_RIP_AND_FINISH();
8006 IEM_MC_END();
8007 break;
8008
8009 case IEMMODE_64BIT:
8010 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
8011 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
8012 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8013 IEM_MC_STORE_GREG_U64_CONST(iFixedReg, u64Imm);
8014 IEM_MC_ADVANCE_RIP_AND_FINISH();
8015 IEM_MC_END();
8016 break;
8017 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8018 }
8019}
8020
8021
8022/**
8023 * @opcode 0xb8
8024 */
8025FNIEMOP_DEF(iemOp_eAX_Iv)
8026{
8027 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
8028 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8029}
8030
8031
8032/**
8033 * @opcode 0xb9
8034 */
8035FNIEMOP_DEF(iemOp_eCX_Iv)
8036{
8037 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
8038 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8039}
8040
8041
8042/**
8043 * @opcode 0xba
8044 */
8045FNIEMOP_DEF(iemOp_eDX_Iv)
8046{
8047 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
8048 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8049}
8050
8051
8052/**
8053 * @opcode 0xbb
8054 */
8055FNIEMOP_DEF(iemOp_eBX_Iv)
8056{
8057 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
8058 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8059}
8060
8061
8062/**
8063 * @opcode 0xbc
8064 */
8065FNIEMOP_DEF(iemOp_eSP_Iv)
8066{
8067 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
8068 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8069}
8070
8071
8072/**
8073 * @opcode 0xbd
8074 */
8075FNIEMOP_DEF(iemOp_eBP_Iv)
8076{
8077 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
8078 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8079}
8080
8081
8082/**
8083 * @opcode 0xbe
8084 */
8085FNIEMOP_DEF(iemOp_eSI_Iv)
8086{
8087 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
8088 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8089}
8090
8091
8092/**
8093 * @opcode 0xbf
8094 */
8095FNIEMOP_DEF(iemOp_eDI_Iv)
8096{
8097 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
8098 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8099}
8100
8101
8102/**
8103 * @opcode 0xc0
8104 */
8105FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
8106{
8107 IEMOP_HLP_MIN_186();
8108 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8109 PCIEMOPSHIFTSIZES pImpl;
8110 switch (IEM_GET_MODRM_REG_8(bRm))
8111 {
8112 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
8113 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
8114 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
8115 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
8116 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
8117 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
8118 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
8119 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8120 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8121 }
8122 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8123
8124 if (IEM_IS_MODRM_REG_MODE(bRm))
8125 {
8126 /* register */
8127 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8128 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0);
8129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8130 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8131 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8132 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8133 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8134 IEM_MC_REF_EFLAGS(pEFlags);
8135 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8136 IEM_MC_ADVANCE_RIP_AND_FINISH();
8137 IEM_MC_END();
8138 }
8139 else
8140 {
8141 /* memory */
8142 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_186, 0);
8143 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8144 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8145
8146 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8148
8149 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8150 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8151 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8152
8153 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8154 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8155 IEM_MC_FETCH_EFLAGS(EFlags);
8156 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8157
8158 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
8159 IEM_MC_COMMIT_EFLAGS(EFlags);
8160 IEM_MC_ADVANCE_RIP_AND_FINISH();
8161 IEM_MC_END();
8162 }
8163}
8164
8165
8166/**
8167 * @opcode 0xc1
8168 */
8169FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
8170{
8171 IEMOP_HLP_MIN_186();
8172 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8173 PCIEMOPSHIFTSIZES pImpl;
8174 switch (IEM_GET_MODRM_REG_8(bRm))
8175 {
8176 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
8177 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
8178 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
8179 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
8180 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
8181 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
8182 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
8183 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8184 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8185 }
8186 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8187
8188 if (IEM_IS_MODRM_REG_MODE(bRm))
8189 {
8190 /* register */
8191 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8192 switch (pVCpu->iem.s.enmEffOpSize)
8193 {
8194 case IEMMODE_16BIT:
8195 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0);
8196 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8197 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8198 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8199 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8200 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8201 IEM_MC_REF_EFLAGS(pEFlags);
8202 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8203 IEM_MC_ADVANCE_RIP_AND_FINISH();
8204 IEM_MC_END();
8205 break;
8206
8207 case IEMMODE_32BIT:
8208 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
8209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8210 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8211 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8212 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8213 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8214 IEM_MC_REF_EFLAGS(pEFlags);
8215 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8216 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
8217 IEM_MC_ADVANCE_RIP_AND_FINISH();
8218 IEM_MC_END();
8219 break;
8220
8221 case IEMMODE_64BIT:
8222 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
8223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8224 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8225 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8226 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8227 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8228 IEM_MC_REF_EFLAGS(pEFlags);
8229 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8230 IEM_MC_ADVANCE_RIP_AND_FINISH();
8231 IEM_MC_END();
8232 break;
8233
8234 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8235 }
8236 }
8237 else
8238 {
8239 /* memory */
8240 switch (pVCpu->iem.s.enmEffOpSize)
8241 {
8242 case IEMMODE_16BIT:
8243 IEM_MC_BEGIN(3, 3, 0, 0);
8244 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8245 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8246
8247 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8249
8250 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8251 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8252 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8253
8254 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8255 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8256 IEM_MC_FETCH_EFLAGS(EFlags);
8257 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8258
8259 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
8260 IEM_MC_COMMIT_EFLAGS(EFlags);
8261 IEM_MC_ADVANCE_RIP_AND_FINISH();
8262 IEM_MC_END();
8263 break;
8264
8265 case IEMMODE_32BIT:
8266 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
8267 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8268 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8269
8270 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8272
8273 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8274 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8275 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8276
8277 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8278 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8279 IEM_MC_FETCH_EFLAGS(EFlags);
8280 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8281
8282 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
8283 IEM_MC_COMMIT_EFLAGS(EFlags);
8284 IEM_MC_ADVANCE_RIP_AND_FINISH();
8285 IEM_MC_END();
8286 break;
8287
8288 case IEMMODE_64BIT:
8289 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
8290 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8291 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8292
8293 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8295
8296 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8297 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8298 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8299
8300 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8301 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8302 IEM_MC_FETCH_EFLAGS(EFlags);
8303 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8304
8305 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
8306 IEM_MC_COMMIT_EFLAGS(EFlags);
8307 IEM_MC_ADVANCE_RIP_AND_FINISH();
8308 IEM_MC_END();
8309 break;
8310
8311 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8312 }
8313 }
8314}
8315
8316
8317/**
8318 * @opcode 0xc2
8319 */
8320FNIEMOP_DEF(iemOp_retn_Iw)
8321{
8322 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
8323 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8324 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8326 switch (pVCpu->iem.s.enmEffOpSize)
8327 {
8328 case IEMMODE_16BIT:
8329 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_iw_16, u16Imm);
8330 case IEMMODE_32BIT:
8331 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_iw_32, u16Imm);
8332 case IEMMODE_64BIT:
8333 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_iw_64, u16Imm);
8334 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8335 }
8336}
8337
8338
8339/**
8340 * @opcode 0xc3
8341 */
8342FNIEMOP_DEF(iemOp_retn)
8343{
8344 IEMOP_MNEMONIC(retn, "retn");
8345 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8346 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8347 switch (pVCpu->iem.s.enmEffOpSize)
8348 {
8349 case IEMMODE_16BIT:
8350 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_16);
8351 case IEMMODE_32BIT:
8352 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_32);
8353 case IEMMODE_64BIT:
8354 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_64);
8355 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8356 }
8357}
8358
8359
8360/**
8361 * @opcode 0xc4
8362 */
8363FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
8364{
8365 /* The LDS instruction is invalid 64-bit mode. In legacy and
8366 compatability mode it is invalid with MOD=3.
8367 The use as a VEX prefix is made possible by assigning the inverted
8368 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
8369 outside of 64-bit mode. VEX is not available in real or v86 mode. */
8370 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8371 if ( IEM_IS_64BIT_CODE(pVCpu)
8372 || IEM_IS_MODRM_REG_MODE(bRm) )
8373 {
8374 IEMOP_MNEMONIC(vex3_prefix, "vex3");
8375 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
8376 {
8377 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
8378 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
8379 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
8380 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
8381 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
8382 if ((bVex2 & 0x80 /* VEX.W */) && IEM_IS_64BIT_CODE(pVCpu))
8383 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
8384 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
8385 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
8386 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
8387 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
8388 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
8389 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
8390
8391 switch (bRm & 0x1f)
8392 {
8393 case 1: /* 0x0f lead opcode byte. */
8394#ifdef IEM_WITH_VEX
8395 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8396#else
8397 IEMOP_BITCH_ABOUT_STUB();
8398 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8399#endif
8400
8401 case 2: /* 0x0f 0x38 lead opcode bytes. */
8402#ifdef IEM_WITH_VEX
8403 return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8404#else
8405 IEMOP_BITCH_ABOUT_STUB();
8406 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8407#endif
8408
8409 case 3: /* 0x0f 0x3a lead opcode bytes. */
8410#ifdef IEM_WITH_VEX
8411 return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8412#else
8413 IEMOP_BITCH_ABOUT_STUB();
8414 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8415#endif
8416
8417 default:
8418 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
8419 IEMOP_RAISE_INVALID_OPCODE_RET();
8420 }
8421 }
8422 Log(("VEX3: VEX support disabled!\n"));
8423 IEMOP_RAISE_INVALID_OPCODE_RET();
8424 }
8425
8426 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
8427 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
8428}
8429
8430
8431/**
8432 * @opcode 0xc5
8433 */
8434FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
8435{
8436 /* The LES instruction is invalid 64-bit mode. In legacy and
8437 compatability mode it is invalid with MOD=3.
8438 The use as a VEX prefix is made possible by assigning the inverted
8439 REX.R to the top MOD bit, and the top bit in the inverted register
8440 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
8441 to accessing registers 0..7 in this VEX form. */
8442 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8443 if ( IEM_IS_64BIT_CODE(pVCpu)
8444 || IEM_IS_MODRM_REG_MODE(bRm))
8445 {
8446 IEMOP_MNEMONIC(vex2_prefix, "vex2");
8447 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
8448 {
8449 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
8450 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
8451 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
8452 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
8453 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
8454 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
8455 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
8456 pVCpu->iem.s.idxPrefix = bRm & 0x3;
8457
8458#ifdef IEM_WITH_VEX
8459 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8460#else
8461 IEMOP_BITCH_ABOUT_STUB();
8462 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8463#endif
8464 }
8465
8466 /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
8467 Log(("VEX2: VEX support disabled!\n"));
8468 IEMOP_RAISE_INVALID_OPCODE_RET();
8469 }
8470
8471 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
8472 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
8473}
8474
8475
8476/**
8477 * @opcode 0xc6
8478 */
8479FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
8480{
8481 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8482 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
8483 IEMOP_RAISE_INVALID_OPCODE_RET();
8484 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
8485
8486 if (IEM_IS_MODRM_REG_MODE(bRm))
8487 {
8488 /* register access */
8489 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8490 IEM_MC_BEGIN(0, 0, 0, 0);
8491 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8492 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
8493 IEM_MC_ADVANCE_RIP_AND_FINISH();
8494 IEM_MC_END();
8495 }
8496 else
8497 {
8498 /* memory access. */
8499 IEM_MC_BEGIN(0, 1, 0, 0);
8500 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8501 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8502 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8504 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
8505 IEM_MC_ADVANCE_RIP_AND_FINISH();
8506 IEM_MC_END();
8507 }
8508}
8509
8510
8511/**
8512 * @opcode 0xc7
8513 */
8514FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
8515{
8516 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8517 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Iz in this group. */
8518 IEMOP_RAISE_INVALID_OPCODE_RET();
8519 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
8520
8521 if (IEM_IS_MODRM_REG_MODE(bRm))
8522 {
8523 /* register access */
8524 switch (pVCpu->iem.s.enmEffOpSize)
8525 {
8526 case IEMMODE_16BIT:
8527 IEM_MC_BEGIN(0, 0, 0, 0);
8528 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8530 IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
8531 IEM_MC_ADVANCE_RIP_AND_FINISH();
8532 IEM_MC_END();
8533 break;
8534
8535 case IEMMODE_32BIT:
8536 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8537 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8539 IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
8540 IEM_MC_ADVANCE_RIP_AND_FINISH();
8541 IEM_MC_END();
8542 break;
8543
8544 case IEMMODE_64BIT:
8545 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
8546 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8548 IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
8549 IEM_MC_ADVANCE_RIP_AND_FINISH();
8550 IEM_MC_END();
8551 break;
8552
8553 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8554 }
8555 }
8556 else
8557 {
8558 /* memory access. */
8559 switch (pVCpu->iem.s.enmEffOpSize)
8560 {
8561 case IEMMODE_16BIT:
8562 IEM_MC_BEGIN(0, 1, 0, 0);
8563 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8564 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
8565 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8567 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
8568 IEM_MC_ADVANCE_RIP_AND_FINISH();
8569 IEM_MC_END();
8570 break;
8571
8572 case IEMMODE_32BIT:
8573 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8574 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8575 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8576 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8578 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
8579 IEM_MC_ADVANCE_RIP_AND_FINISH();
8580 IEM_MC_END();
8581 break;
8582
8583 case IEMMODE_64BIT:
8584 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
8585 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8586 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8587 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8589 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
8590 IEM_MC_ADVANCE_RIP_AND_FINISH();
8591 IEM_MC_END();
8592 break;
8593
8594 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8595 }
8596 }
8597}
8598
8599
8600
8601
8602/**
8603 * @opcode 0xc8
8604 */
8605FNIEMOP_DEF(iemOp_enter_Iw_Ib)
8606{
8607 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
8608 IEMOP_HLP_MIN_186();
8609 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8610 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
8611 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
8612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8613 IEM_MC_DEFER_TO_CIMPL_3_RET(0,
8614 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8615 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
8616 iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
8617}
8618
8619
8620/**
8621 * @opcode 0xc9
8622 */
8623FNIEMOP_DEF(iemOp_leave)
8624{
8625 IEMOP_MNEMONIC(leave, "leave");
8626 IEMOP_HLP_MIN_186();
8627 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8629 IEM_MC_DEFER_TO_CIMPL_1_RET(0,
8630 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8631 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
8632 iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
8633}
8634
8635
8636/**
8637 * @opcode 0xca
8638 */
8639FNIEMOP_DEF(iemOp_retf_Iw)
8640{
8641 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
8642 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8644 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
8645 | IEM_CIMPL_F_MODE,
8646 RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
8647 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
8648 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8649 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
8650 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
8651 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
8652 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8653 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
8654 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
8655 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
8656 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8657 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS),
8658 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
8659}
8660
8661
8662/**
8663 * @opcode 0xcb
8664 */
8665FNIEMOP_DEF(iemOp_retf)
8666{
8667 IEMOP_MNEMONIC(retf, "retf");
8668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8669 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
8670 | IEM_CIMPL_F_MODE,
8671 RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
8672 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
8673 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8674 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
8675 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
8676 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
8677 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8678 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
8679 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
8680 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
8681 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8682 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS),
8683 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
8684}
8685
8686
8687/**
8688 * @opcode 0xcc
8689 */
8690FNIEMOP_DEF(iemOp_int3)
8691{
8692 IEMOP_MNEMONIC(int3, "int3");
8693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8694 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8695 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
8696 iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
8697}
8698
8699
8700/**
8701 * @opcode 0xcd
8702 */
8703FNIEMOP_DEF(iemOp_int_Ib)
8704{
8705 IEMOP_MNEMONIC(int_Ib, "int Ib");
8706 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
8707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8708 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8709 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS, UINT64_MAX,
8710 iemCImpl_int, u8Int, IEMINT_INTN);
8711 /** @todo make task-switches, ring-switches, ++ return non-zero status */
8712}
8713
8714
/**
 * @opcode 0xce
 *
 * Conditionally raises \#OF (X86_XCPT_OF) when EFLAGS.OF is set; invalid in
 * 64-bit mode (IEMOP_HLP_NO_64BIT).  Flushes all guest register shadows
 * (UINT64_MAX) like int Ib, as the exception path may switch rings/tasks.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_BRANCH_CONDITIONAL | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                UINT64_MAX,
                                iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
8728
8729
/**
 * @opcode 0xcf
 *
 * Interrupt return.  Checks for pending IRQs before executing
 * (IEM_CIMPL_F_CHECK_IRQ_BEFORE) and defers to iemCImpl_iret with the
 * current effective operand size.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
                                  RT_BIT_64(kIemNativeGstReg_SegSelFirst   + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst  + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst   + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst  + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst   + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst  + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst   + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst  + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS),
                                iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
    /* Segment registers are sanitized when returning to an outer ring, or fully
       reloaded when returning to v86 mode. Thus the large flush list above. */
}
8755
8756
/**
 * @opcode 0xd0
 *
 * Group 2 rotate/shift on a byte operand (Eb) with a fixed count of 1.
 * The ModR/M reg field selects the operation; /6 is an invalid opcode.
 * OF and AF are undefined after these per the verification hint below.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Select the assembly worker table by the ModR/M reg field. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEM_MC_BEGIN(3, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the byte read/write, operate in place, then commit. */
        IEM_MC_BEGIN(3, 3, 0, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,   bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8814
8815
8816
/**
 * @opcode 0xd1
 *
 * Group 2 rotate/shift on a word/dword/qword operand (Ev) with a fixed
 * count of 1.  Operation selected by the ModR/M reg field; /6 is invalid.
 * OF and AF are undefined afterwards per the verification hint.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Select the assembly worker table by the ModR/M reg field. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); /* 32-bit writes zero bits 63:32 */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map read/write, operate in place, commit mapping and flags. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8955
8956
/**
 * @opcode 0xd2
 *
 * Group 2 rotate/shift on a byte operand (Eb) with the count taken from CL.
 * Operation selected by the ModR/M reg field; /6 is an invalid opcode.
 * OF and AF are undefined afterwards per the verification hint.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Select the assembly worker table by the ModR/M reg field. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register: fetch the shift count from CL (low byte of xCX). */
        IEM_MC_BEGIN(3, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,   pu8Dst,    0);
        IEM_MC_ARG(uint8_t,     cShiftArg, 1);
        IEM_MC_ARG(uint32_t *,  pEFlags,   2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory: map read/write, shift by CL in place, then commit. */
        IEM_MC_BEGIN(3, 3, 0, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,   bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9016
9017
/**
 * @opcode 0xd3
 *
 * Group 2 rotate/shift on a word/dword/qword operand (Ev) with the count
 * taken from CL.  Operation selected by the ModR/M reg field; /6 is an
 * invalid opcode.  OF and AF are undefined afterwards.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Select the assembly worker table by the ModR/M reg field. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register: fetch the shift count from CL (low byte of xCX). */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); /* 32-bit writes zero bits 63:32 */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map read/write, shift by CL in place, commit mapping and flags. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9162
/**
 * @opcode 0xd4
 *
 * ASCII adjust AX after multiply, with an explicit (normally 10) divisor
 * byte.  A zero immediate raises \#DE at decode time; invalid in 64-bit
 * mode.  Flush mask covers the xAX shadow only.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm) /* division by zero -> #DE */
        IEMOP_RAISE_DIVIDE_ERROR_RET();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aam, bImm);
}
9176
9177
/**
 * @opcode 0xd5
 *
 * ASCII adjust AX before division, with an explicit (normally 10)
 * multiplier byte.  Invalid in 64-bit mode.  Flush mask covers the xAX
 * shadow only.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aad, bImm);
}
9189
9190
/**
 * @opcode 0xd6
 *
 * SALC - sets AL to 0xff when EFLAGS.CF is set, otherwise to 0x00.
 * Invalid in 64-bit mode (IEMOP_HLP_NO_64BIT).
 */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9209
9210
/**
 * @opcode 0xd7
 *
 * Table lookup: AL = [effseg:xBX + zero-extended AL], with the address
 * width (and thus the fetch variant) chosen by the effective address mode.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);  /* AL, zero extended */
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX); /* + BX */
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);           /* -> AL */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);  /* AL, zero extended */
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX); /* + EBX */
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);           /* -> AL */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);  /* AL, zero extended */
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX); /* + RBX */
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);           /* -> AL */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9261
9262
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * If either register is empty, the underflow path updates FSW/raises per
 * IEM_MC_FPU_STACK_UNDERFLOW instead of calling the worker.
 *
 * @param   bRm         Mod R/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);  /* result -> ST0 */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9292
9293
/**
 * Common worker for FPU instructions working on ST0 and STn, and only
 * affecting flags (FSW) - no register is written.
 *
 * @param   bRm         Mod R/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,          u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw,   u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value1,             1);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value2,             2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no destination register to mark on underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9323
9324
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags (FSW), and popping the stack when done.
 *
 * @param   bRm         Mod R/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,          u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw,   u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value1,             1);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value2,             2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no destination register; still pops on underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9354
9355
/** Opcode 0xd8 11/0.  fadd st0,stN - ST0 += STn, via the common st0/stN worker. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
9362
9363
/** Opcode 0xd8 11/1.  fmul st0,stN - ST0 *= STn, via the common st0/stN worker. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
9370
9371
/** Opcode 0xd8 11/2.  fcom st0,stN - compare, FSW only (no store, no pop). */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
9378
9379
/** Opcode 0xd8 11/3.  fcomp st0,stN - same compare worker as fcom, but pops. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
9386
9387
/** Opcode 0xd8 11/4.  fsub st0,stN - ST0 -= STn, via the common st0/stN worker. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
9394
9395
/** Opcode 0xd8 11/5.  fsubr st0,stN - reversed subtraction, result in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
9402
9403
/** Opcode 0xd8 11/6.  fdiv st0,stN - ST0 /= STn, via the common st0/stN worker. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
9410
9411
/** Opcode 0xd8 11/7.  fdivr st0,stN - reversed division, result in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
9418
9419
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * The 32-bit real value is fetched from memory before the FPU usage check;
 * an empty ST0 takes the stack-underflow path instead of calling the worker.
 *
 * @param   bRm         Mod R/M byte (memory form; encodes the m32r operand).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);  /* result -> ST0 */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9455
9456
/** Opcode 0xd8 !11/0.  fadd st0,m32r - ST0 += m32 real, via the m32r worker. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
9463
9464
/** Opcode 0xd8 !11/1.  fmul st0,m32r - ST0 *= m32 real, via the m32r worker. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
9471
9472
/** Opcode 0xd8 !11/2.
 * fcom st0,m32r - compare ST0 with an m32 real; updates FSW only, no pop. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9504
9505
/** Opcode 0xd8 !11/3.
 * fcomp st0,m32r - like fcom st0,m32r but pops the FPU stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9537
9538
/** Opcode 0xd8 !11/4.  fsub st0,m32r - ST0 -= m32 real, via the m32r worker. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
9545
9546
/** Opcode 0xd8 !11/5.  fsubr st0,m32r - reversed subtraction, result in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
9553
9554
/** Opcode 0xd8 !11/6.  fdiv st0,m32r - ST0 /= m32 real, via the m32r worker. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
9561
9562
/** Opcode 0xd8 !11/7.  fdivr st0,m32r - reversed division, result in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
9569
9570
/**
 * @opcode 0xd8
 *
 * FPU escape 0: records the FPU opcode (low 3 bits of the escape byte
 * combined with the ModR/M byte) and dispatches on the ModR/M reg field,
 * using the register (mod=11) or m32r memory forms above.
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* mod=11: st0,stN register forms. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* mod!=11: st0,m32r memory forms. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9610
9611
/** Opcode 0xd9 /0 mem32real
 * FLD m32r - fetch a 32-bit real from memory, convert it to 80-bit format
 * and push it onto the FPU stack (or signal stack overflow).
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* The push lands in what is currently ST(7); it must be empty or we
       have a stack overflow instead of a load. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9642
9643
/** Opcode 0xd9 !11/2 mem32real
 * FST m32r - store ST(0) to memory as a 32-bit real (no pop).
 * If ST(0) is empty and IM is masked, a negative QNaN is stored instead. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on the FSW the helper produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, write a negative QNaN to memory. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9677
9678
/** Opcode 0xd9 !11/3
 * FSTP m32r - store ST(0) to memory as a 32-bit real, then pop the stack.
 * Identical to iemOp_fst_m32r except for the popping FSW/underflow macros. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on the FSW the helper produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, write a negative QNaN to memory. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9712
9713
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte - load the FPU environment from memory; the heavy
 * lifting (14 vs 28 byte format by operand size) is done in iemCImpl_fldenv. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
9731
9732
9733/** Opcode 0xd9 !11/5 */
9734FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
9735{
9736 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
9737 IEM_MC_BEGIN(1, 1, 0, 0);
9738 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9739 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9740
9741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9742 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9743 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9744
9745 IEM_MC_ARG(uint16_t, u16Fsw, 0);
9746 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9747
9748 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, 0, iemCImpl_fldcw, u16Fsw);
9749 IEM_MC_END();
9750}
9751
9752
/** Opcode 0xd9 !11/6
 * FNSTENV m14/28byte - store the FPU environment to memory without checking
 * for pending exceptions; format selection is handled in iemCImpl_fnstenv. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
9770
9771
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte - store the current FPU control word to memory. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9788
9789
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP - does nothing except raise the usual FPU exceptions and update
 * FOP/FPUIP. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9805
9806
/** Opcode 0xd9 11/0 stN
 * FLD ST(i) - push a copy of register ST(i) onto the stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Source register must not be empty, otherwise it's a push underflow. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9831
9832
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i) - exchange ST(0) and ST(i); the empty-register case is
 * delegated to a C implementation (iemCImpl_fxch_underflow). */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        /* Swap: ST(i) -> ST(0) via FpuRes (C1 set per result), ST(0) -> ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9861
9862
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i) - copy ST(0) to ST(i) and pop the stack.  The ST(0)
 * destination case is special-cased as it only pops. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        /* Destination is ST(0) itself: just pop, no data movement needed. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* General case: store ST(0) into ST(i), then pop. */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9909
9910
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * Used by FCHS, FABS, F2XM1, FSQRT, FRNDINT, FSIN, FCOS and friends.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9938
9939
/** Opcode 0xd9 0xe0.
 * FCHS - change the sign of ST(0); unary worker + fchs assembly helper. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
9946
9947
/** Opcode 0xd9 0xe1.
 * FABS - absolute value of ST(0); unary worker + fabs assembly helper. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
9954
9955
/** Opcode 0xd9 0xe4.
 * FTST - compare ST(0) against 0.0, setting only the FSW condition codes. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9979
9980
/** Opcode 0xd9 0xe5.
 * FXAM - classify ST(0) via the FSW condition codes.  Note that unlike most
 * ST(0) operations there is no emptiness check here: the register is
 * referenced unconditionally so the helper can classify empty registers too
 * (presumably reporting the empty class — behavior lives in
 * iemAImpl_fxam_r80). */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10001
10002
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * Used by FLD1, FLDL2T, FLDL2E, FLDPI, FLDLG2, FLDLN2 and FLDZ.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEM_MC_BEGIN(1, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* The push lands in what is currently ST(7); it must be empty or we
       have a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10028
10029
/** Opcode 0xd9 0xe8.
 * FLD1 - push +1.0 onto the FPU stack. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
10036
10037
/** Opcode 0xd9 0xe9.
 * FLDL2T - push log2(10) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
10044
10045
/** Opcode 0xd9 0xea.
 * FLDL2E - push log2(e) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
10052
/** Opcode 0xd9 0xeb.
 * FLDPI - push pi onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
10059
10060
/** Opcode 0xd9 0xec.
 * FLDLG2 - push log10(2) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
10067
/** Opcode 0xd9 0xed.
 * FLDLN2 - push ln(2) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
10074
10075
/** Opcode 0xd9 0xee.
 * FLDZ - push +0.0 onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
10082
10083
/** Opcode 0xd9 0xf0.
 *
 * F2XM1 - compute 2^ST(0) - 1, replacing ST(0).
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
10097
10098
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * Used by FYL2X, FPATAN and FYL2XP1 (with bRm fixed to 1, i.e. ST1).
 *
 * @param   bRm         Mod R/M byte (R/M selects the destination register).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) (destination) and ST(0) must be occupied. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10129
10130
/** Opcode 0xd9 0xf1.
 * FYL2X - ST(1) := ST(1) * log2(ST(0)), pop; via the stN/st0/pop worker. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
10137
10138
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * Used by FPTAN, FXTRACT and FSINCOS.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10166
10167
/** Opcode 0xd9 0xf2.
 * FPTAN - partial tangent of ST(0); replaces ST(0) and pushes a second
 * result, via the replace-st0-and-push worker. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
10174
10175
/** Opcode 0xd9 0xf3.
 * FPATAN - partial arctangent: result to ST(1), pop; via the stN/st0/pop
 * worker. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
10182
10183
/** Opcode 0xd9 0xf4.
 * FXTRACT - split ST(0) into exponent and significand; replaces ST(0) and
 * pushes a second result, via the replace-st0-and-push worker. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
10190
10191
/** Opcode 0xd9 0xf5.
 * FPREM1 - IEEE partial remainder of ST(0)/ST(1), result in ST(0). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
10198
10199
/** Opcode 0xd9 0xf6.
 * FDECSTP - decrement the FPU stack-top pointer (TOP) without touching the
 * register contents or tag bits. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10220
10221
/** Opcode 0xd9 0xf7.
 * FINCSTP - increment the FPU stack-top pointer (TOP) without touching the
 * register contents or tag bits. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10242
10243
/** Opcode 0xd9 0xf8.
 * FPREM - truncating partial remainder of ST(0)/ST(1), result in ST(0). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
10250
10251
/** Opcode 0xd9 0xf9.
 * FYL2XP1 - ST(1) := ST(1) * log2(ST(0) + 1), pop; via the stN/st0/pop
 * worker. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
10258
10259
/** Opcode 0xd9 0xfa.
 * FSQRT - square root of ST(0); unary worker + fsqrt assembly helper. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
10266
10267
/** Opcode 0xd9 0xfb.
 * FSINCOS - sine and cosine of ST(0); replaces ST(0) and pushes a second
 * result, via the replace-st0-and-push worker. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
10274
10275
/** Opcode 0xd9 0xfc.
 * FRNDINT - round ST(0) to integer per the rounding control; unary worker. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
10282
10283
/** Opcode 0xd9 0xfd.
 * FSCALE - scale ST(0) by ST(1), result in ST(0). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
10290
10291
/** Opcode 0xd9 0xfe.
 * FSIN - sine of ST(0); unary worker + fsin assembly helper. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
10298
10299
/** Opcode 0xd9 0xff.
 * FCOS - cosine of ST(0); unary worker + fcos assembly helper. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
10306
10307
/** Used by iemOp_EscF1.
 * Dispatch table for 0xd9 register-form opcodes 0xe0..0xff; indexed by
 * (second opcode byte - 0xe0).  Holes map to iemOp_Invalid. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
10344
10345
/**
 * @opcode 0xd9
 *
 * Second x87 escape byte.  Register form (mod==3) dispatches on /reg, with
 * /4../7 going through the g_apfnEscF1_E0toFF table; memory form handles
 * fld/fst/fstp m32r and the environment/control-word instructions.
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (FOP) for FSTENV & friends. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                /* Only 0xd9 0xd0 (FNOP) is defined in this group. */
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* 0xe0..0xff: table dispatch. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10390
10391
/** Opcode 0xda 11/0.
 * FCMOVB - copy ST(i) to ST(0) if CF is set (below). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be occupied or we underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10416
10417
/** Opcode 0xda 11/1.
 * FCMOVE - copy ST(i) to ST(0) if ZF is set (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be occupied or we underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10442
10443
/** Opcode 0xda 11/2.
 * FCMOVBE - copy ST(i) to ST(0) if CF or ZF is set (below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be occupied or we underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10468
10469
/** Opcode 0xda 11/3.
 * FCMOVU - copy ST(i) to ST(0) if PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be occupied or we underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10494
10495
/**
 * Common worker for FPU instructions working on ST0 and ST1, only affecting
 * flags, and popping twice when done.
 *
 * Used by FUCOMPP (and similar compare-and-double-pop instructions).
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10525
10526
/** Opcode 0xda 0xe9.
 * FUCOMPP - unordered compare of ST(0) with ST(1), pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
10533
10534
10535/**
10536 * Common worker for FPU instructions working on ST0 and an m32i, and storing
10537 * the result in ST0.
10538 *
10539 * @param bRm Mod R/M byte.
10540 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10541 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    /* Decode the effective address first, then finish instruction decoding. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the 32-bit integer operand before touching FPU state. */
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        /* ST0 valid: call the worker and store the result back into ST0. */
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: record stack underflow against register 0. */
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10570
10571
10572/** Opcode 0xda !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    /* Delegate to the common ST0/m32i worker with the fiadd assembly implementation. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
10578
10579
10580/** Opcode 0xda !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    /* Delegate to the common ST0/m32i worker with the fimul assembly implementation. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
10586
10587
10588/** Opcode 0xda !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the 32-bit integer operand before FPU state is committed to. */
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        /* Compare ST0 against the integer; only FSW is updated (no store, no pop). */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: underflow; UINT8_MAX = no destination register to tag. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10619
10620
10621/** Opcode 0xda !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the 32-bit integer operand before FPU state is committed to. */
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        /* Same worker as FICOM, but the FSW commit pops the stack afterwards. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: underflow then pop; UINT8_MAX = no destination register. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10652
10653
10654/** Opcode 0xda !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    /* Delegate to the common ST0/m32i worker with the fisub assembly implementation. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
10660
10661
10662/** Opcode 0xda !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    /* Delegate to the common ST0/m32i worker with the fisubr assembly implementation. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
10668
10669
10670/** Opcode 0xda !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    /* Delegate to the common ST0/m32i worker with the fidiv assembly implementation. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
10676
10677
10678/** Opcode 0xda !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    /* Delegate to the common ST0/m32i worker with the fidivr assembly implementation. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
10684
10685
10686/**
10687 * @opcode 0xda
10688 */
FNIEMOP_DEF(iemOp_EscF2)
{
    /* Fetch ModR/M and latch the 11-bit FPU opcode (low 3 opcode bits + ModR/M). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register operand forms: fcmovcc st0,stN and fucompp (0xe9 only). */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory operand forms: 32-bit integer arithmetic/compare on ST0. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10727
10728
10729/** Opcode 0xdb !11/0. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST7 (the register that will become the new top) to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Destination slot occupied: push overflow. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10759
10760
10761/** Opcode 0xdb !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so memory faults precede FPU changes. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Convert ST0 (truncating variant) to i32, commit store, update FSW and pop. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: if invalid-op is masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10794
10795
10796/** Opcode 0xdb !11/2. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so memory faults precede FPU changes. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Convert ST0 to i32, commit the store, update FSW; no pop (unlike FISTP). */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: if invalid-op is masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10829
10830
10831/** Opcode 0xdb !11/3. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so memory faults precede FPU changes. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Same worker as FIST, but the FSW commit also pops the stack. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: if invalid-op is masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10864
10865
10866/** Opcode 0xdb !11/5. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the full 80-bit value from memory before touching the FPU stack. */
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST7 (the register that will become the new top) to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Destination slot occupied: push overflow. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10896
10897
10898/** Opcode 0xdb !11/7. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the 10-byte destination writable; 7 is the alignment mask argument. */
    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Copy ST0 out to memory, commit, update FSW and pop. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: if invalid-op is masked, store negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10931
10932
10933/** Opcode 0xdb 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST0 must be valid; pr80ValueN references ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        /* Condition NB: copy ST(i) into ST0 when CF is clear. */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FIP are updated whether or not the move happened. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10957
10958
10959/** Opcode 0xdb 11/1. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST0 must be valid; pr80ValueN references ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        /* Condition NE: copy ST(i) into ST0 when ZF is clear. */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FIP are updated whether or not the move happened. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10983
10984
10985/** Opcode 0xdb 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST0 must be valid; pr80ValueN references ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        /* Condition NBE: copy ST(i) into ST0 when both CF and ZF are clear. */
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FIP are updated whether or not the move happened. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11009
11010
11011/** Opcode 0xdb 11/3. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST0 must be valid; pr80ValueN references ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        /* Condition NU (not unordered): copy ST(i) into ST0 when PF is clear. */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FIP are updated whether or not the move happened. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11035
11036
11037/** Opcode 0xdb 0xe0. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    /* 8087-only instruction; emulated as a no-op apart from the #NM check. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11047
11048
11049/** Opcode 0xdb 0xe1. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    /* 8087-only instruction; emulated as a no-op apart from the #NM check. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11059
11060
11061/** Opcode 0xdb 0xe2. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    /* Mark the FPU state modified, then clear the FSW exception bits. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11073
11074
11075/** Opcode 0xdb 0xe3. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to the C implementation; FNINIT does not check pending exceptions. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, 0, iemCImpl_finit, false /*fCheckXcpts*/);
}
11082
11083
11084/** Opcode 0xdb 0xe4. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    /* 80287-only instruction; emulated as a no-op apart from the #NM check. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11094
11095
11096/** Opcode 0xdb 0xe5. */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    /* Disabled no-op variant kept for reference; newer CPUs raise #UD instead. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
11111
11112
11113/** Opcode 0xdb 11/5. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    /* Deferred to the shared fcomi/fucomi C impl: unordered compare, no pop.
       The pop flag is OR'ed into the same argument as the FPU opcode. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11121
11122
11123/** Opcode 0xdb 11/6. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    /* Deferred to the shared fcomi/fucomi C impl: ordered compare, no pop.
       The pop flag is OR'ed into the same argument as the FPU opcode. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11131
11132
11133/**
11134 * @opcode 0xdb
11135 */
FNIEMOP_DEF(iemOp_EscF3)
{
    /* Fetch ModR/M and latch the 11-bit FPU opcode (low 3 opcode bits + ModR/M). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register operand forms: fcmovncc, control ops (reg=4), fucomi/fcomi. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* Control-op subgroup selected by the full ModR/M byte. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
                    case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory operand forms: 32-bit integer load/store and 80-bit real load/store. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11184
11185
11186/**
11187 * Common worker for FPU instructions working on STn and ST0, and storing the
11188 * result in STn unless IE, DE or ZE was raised.
11189 *
11190 * @param bRm Mod R/M byte.
11191 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11192 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Operand 1 is ST(i) from ModR/M, operand 2 is ST0; result goes to ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Either register empty: underflow against the destination ST(i). */
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11216
11217
11218/** Opcode 0xdc 11/0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    /* Delegate to the common stN,st0 worker with the fadd assembly implementation. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
11224
11225
11226/** Opcode 0xdc 11/1. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    /* Delegate to the common stN,st0 worker with the fmul assembly implementation. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
11232
11233
11234/** Opcode 0xdc 11/4. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    /* Delegate to the common stN,st0 worker with the fsubr assembly implementation. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
11240
11241
11242/** Opcode 0xdc 11/5. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    /* Delegate to the common stN,st0 worker with the fsub assembly implementation. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
11248
11249
11250/** Opcode 0xdc 11/6. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    /* Delegate to the common stN,st0 worker with the fdivr assembly implementation. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
11256
11257
11258/** Opcode 0xdc 11/7. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    /* Delegate to the common stN,st0 worker with the fdiv assembly implementation. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
11264
11265
11266/**
11267 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
11268 * memory operand, and storing the result in ST0.
11269 *
11270 * @param bRm Mod R/M byte.
11271 * @param pfnImpl Pointer to the instruction implementation (assembly).
11272 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Fetch the 64-bit real operand before touching FPU state. */
    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
        /* ST0 valid: call the worker and store the result back into ST0. */
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: record stack underflow against register 0. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11300
11301
11302/** Opcode 0xdc !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    /* Delegate to the common ST0/m64r worker with the fadd assembly implementation. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
11308
11309
11310/** Opcode 0xdc !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    /* Delegate to the common ST0/m64r worker with the fmul assembly implementation. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
11316
11317
11318/** Opcode 0xdc !11/2. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the 64-bit real operand before FPU state is committed to. */
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        /* Compare ST0 against the memory operand; FSW-only update, no pop. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: underflow; UINT8_MAX = no destination register to tag. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11349
11350
11351/** Opcode 0xdc !11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the 64-bit real operand before FPU state is committed to. */
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        /* Same worker as FCOM, but the FSW commit pops the stack afterwards. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: underflow then pop; UINT8_MAX = no destination register. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11382
11383
11384/** Opcode 0xdc !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    /* Delegate to the common ST0/m64r worker with the fsub assembly implementation. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
11390
11391
11392/** Opcode 0xdc !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    /* Delegate to the common ST0/m64r worker with the fsubr assembly implementation. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
11398
11399
11400/** Opcode 0xdc !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    /* Delegate to the common ST0/m64r worker with the fdiv assembly implementation. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
11406
11407
11408/** Opcode 0xdc !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    /* Delegate to the common ST0/m64r worker with the fdivr assembly implementation. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
11414
11415
11416/**
11417 * @opcode 0xdc
11418 */
FNIEMOP_DEF(iemOp_EscF4)
{
    /* Fetch ModR/M and latch the 11-bit FPU opcode (low 3 opcode bits + ModR/M). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register operand forms: arithmetic with ST(i) as destination. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory operand forms: 64-bit real arithmetic/compare on ST0. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11454
11455
11456/** Opcode 0xdd !11/0.
11457 * @sa iemOp_fld_m32r */
/** Opcode 0xdd !11/0.
 * FLD m64r: converts the 64-bit real at the effective address to 80-bit and
 * pushes it onto the FPU stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* ST7 is the register that becomes ST0 after the push; it must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11486
11487
11488/** Opcode 0xdd !11/0. */
11489FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
11490{
11491 IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
11492 IEM_MC_BEGIN(3, 2, 0, 0);
11493 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11494 IEM_MC_LOCAL(uint16_t, u16Fsw);
11495 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11496 IEM_MC_ARG(int64_t *, pi64Dst, 1);
11497 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
11498
11499 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11501 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11502 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11503
11504 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
11505 IEM_MC_PREPARE_FPU_USAGE();
11506 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11507 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
11508 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
11509 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11510 } IEM_MC_ELSE() {
11511 IEM_MC_IF_FCW_IM() {
11512 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
11513 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
11514 } IEM_MC_ENDIF();
11515 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11516 } IEM_MC_ENDIF();
11517 IEM_MC_ADVANCE_RIP_AND_FINISH();
11518
11519 IEM_MC_END();
11520}
11521
11522
11523/** Opcode 0xdd !11/0. */
11524FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
11525{
11526 IEMOP_MNEMONIC(fst_m64r, "fst m64r");
11527 IEM_MC_BEGIN(3, 2, 0, 0);
11528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11529 IEM_MC_LOCAL(uint16_t, u16Fsw);
11530 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11531 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
11532 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
11533
11534 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11536 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11537 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11538
11539 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
11540 IEM_MC_PREPARE_FPU_USAGE();
11541 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11542 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
11543 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
11544 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11545 } IEM_MC_ELSE() {
11546 IEM_MC_IF_FCW_IM() {
11547 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
11548 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
11549 } IEM_MC_ENDIF();
11550 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11551 } IEM_MC_ENDIF();
11552 IEM_MC_ADVANCE_RIP_AND_FINISH();
11553
11554 IEM_MC_END();
11555}
11556
11557
11558
11559
11560/** Opcode 0xdd !11/0. */
11561FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
11562{
11563 IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
11564 IEM_MC_BEGIN(3, 2, 0, 0);
11565 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11566 IEM_MC_LOCAL(uint16_t, u16Fsw);
11567 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11568 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
11569 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
11570
11571 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11573 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11574 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11575
11576 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
11577 IEM_MC_PREPARE_FPU_USAGE();
11578 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11579 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
11580 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
11581 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11582 } IEM_MC_ELSE() {
11583 IEM_MC_IF_FCW_IM() {
11584 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
11585 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
11586 } IEM_MC_ENDIF();
11587 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11588 } IEM_MC_ENDIF();
11589 IEM_MC_ADVANCE_RIP_AND_FINISH();
11590
11591 IEM_MC_END();
11592}
11593
11594
11595/** Opcode 0xdd !11/0. */
11596FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
11597{
11598 IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
11599 IEM_MC_BEGIN(3, 0, 0, 0);
11600 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
11601 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11602
11603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11604 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11605 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
11606
11607 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
11608 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
11609 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
11610 IEM_MC_END();
11611}
11612
11613
11614/** Opcode 0xdd !11/0. */
11615FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
11616{
11617 IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
11618 IEM_MC_BEGIN(3, 0, 0, 0);
11619 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
11620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11621
11622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11623 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11624 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */
11625
11626 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
11627 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
11628 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
11629 IEM_MC_END();
11630}
11631
11632/** Opcode 0xdd !11/0. */
11633FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
11634{
11635 IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");
11636
11637 IEM_MC_BEGIN(0, 2, 0, 0);
11638 IEM_MC_LOCAL(uint16_t, u16Tmp);
11639 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11640
11641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11643 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11644
11645 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
11646 IEM_MC_FETCH_FSW(u16Tmp);
11647 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
11648 IEM_MC_ADVANCE_RIP_AND_FINISH();
11649
11650/** @todo Debug / drop a hint to the verifier that things may differ
11651 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
11652 * NT4SP1. (X86_FSW_PE) */
11653 IEM_MC_END();
11654}
11655
11656
11657/** Opcode 0xdd 11/0. */
11658FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
11659{
11660 IEMOP_MNEMONIC(ffree_stN, "ffree stN");
11661 /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
11662 unmodified. */
11663 IEM_MC_BEGIN(0, 0, 0, 0);
11664 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11665
11666 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11667 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11668
11669 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
11670 IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
11671 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
11672
11673 IEM_MC_ADVANCE_RIP_AND_FINISH();
11674 IEM_MC_END();
11675}
11676
11677
11678/** Opcode 0xdd 11/1. */
11679FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
11680{
11681 IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
11682 IEM_MC_BEGIN(0, 2, 0, 0);
11683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11684 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
11685 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11686 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11687 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11688
11689 IEM_MC_PREPARE_FPU_USAGE();
11690 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11691 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
11692 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
11693 } IEM_MC_ELSE() {
11694 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
11695 } IEM_MC_ENDIF();
11696
11697 IEM_MC_ADVANCE_RIP_AND_FINISH();
11698 IEM_MC_END();
11699}
11700
11701
11702/** Opcode 0xdd 11/3. */
11703FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
11704{
11705 IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
11706 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
11707}
11708
11709
11710/** Opcode 0xdd 11/4. */
11711FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
11712{
11713 IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
11714 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
11715}
11716
11717
11718/**
11719 * @opcode 0xdd
11720 */
11721FNIEMOP_DEF(iemOp_EscF5)
11722{
11723 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11724 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
11725 if (IEM_IS_MODRM_REG_MODE(bRm))
11726 {
11727 switch (IEM_GET_MODRM_REG_8(bRm))
11728 {
11729 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
11730 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
11731 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
11732 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
11733 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
11734 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
11735 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11736 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11737 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11738 }
11739 }
11740 else
11741 {
11742 switch (IEM_GET_MODRM_REG_8(bRm))
11743 {
11744 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
11745 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
11746 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
11747 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
11748 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
11749 case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
11750 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
11751 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
11752 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11753 }
11754 }
11755}
11756
11757
11758/** Opcode 0xde 11/0. */
11759FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
11760{
11761 IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
11762 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
11763}
11764
11765
11766/** Opcode 0xde 11/0. */
11767FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
11768{
11769 IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
11770 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
11771}
11772
11773
11774/** Opcode 0xde 0xd9. */
11775FNIEMOP_DEF(iemOp_fcompp)
11776{
11777 IEMOP_MNEMONIC(fcompp, "fcompp");
11778 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
11779}
11780
11781
11782/** Opcode 0xde 11/4. */
11783FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
11784{
11785 IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
11786 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
11787}
11788
11789
11790/** Opcode 0xde 11/5. */
11791FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
11792{
11793 IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
11794 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
11795}
11796
11797
11798/** Opcode 0xde 11/6. */
11799FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
11800{
11801 IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
11802 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
11803}
11804
11805
11806/** Opcode 0xde 11/7. */
11807FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
11808{
11809 IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
11810 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
11811}
11812
11813
11814/**
11815 * Common worker for FPU instructions working on ST0 and an m16i, and storing
11816 * the result in ST0.
11817 *
11818 * @param bRm Mod R/M byte.
11819 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11820 */
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: signal stack underflow on register 0. */
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11849
11850
11851/** Opcode 0xde !11/0. */
11852FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
11853{
11854 IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
11855 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
11856}
11857
11858
11859/** Opcode 0xde !11/1. */
11860FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
11861{
11862 IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
11863 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
11864}
11865
11866
11867/** Opcode 0xde !11/2. */
11868FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
11869{
11870 IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");
11871
11872 IEM_MC_BEGIN(3, 3, 0, 0);
11873 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11874 IEM_MC_LOCAL(uint16_t, u16Fsw);
11875 IEM_MC_LOCAL(int16_t, i16Val2);
11876 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11877 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11878 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
11879
11880 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11882
11883 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11884 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11885 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11886
11887 IEM_MC_PREPARE_FPU_USAGE();
11888 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11889 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
11890 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11891 } IEM_MC_ELSE() {
11892 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11893 } IEM_MC_ENDIF();
11894 IEM_MC_ADVANCE_RIP_AND_FINISH();
11895
11896 IEM_MC_END();
11897}
11898
11899
11900/** Opcode 0xde !11/3. */
11901FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
11902{
11903 IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");
11904
11905 IEM_MC_BEGIN(3, 3, 0, 0);
11906 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11907 IEM_MC_LOCAL(uint16_t, u16Fsw);
11908 IEM_MC_LOCAL(int16_t, i16Val2);
11909 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11910 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11911 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
11912
11913 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11915
11916 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11917 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11918 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11919
11920 IEM_MC_PREPARE_FPU_USAGE();
11921 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11922 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
11923 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11924 } IEM_MC_ELSE() {
11925 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11926 } IEM_MC_ENDIF();
11927 IEM_MC_ADVANCE_RIP_AND_FINISH();
11928
11929 IEM_MC_END();
11930}
11931
11932
11933/** Opcode 0xde !11/4. */
11934FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
11935{
11936 IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
11937 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
11938}
11939
11940
11941/** Opcode 0xde !11/5. */
11942FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
11943{
11944 IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
11945 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
11946}
11947
11948
11949/** Opcode 0xde !11/6. */
11950FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
11951{
11952 IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
11953 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
11954}
11955
11956
11957/** Opcode 0xde !11/7. */
11958FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
11959{
11960 IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
11961 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
11962}
11963
11964
11965/**
11966 * @opcode 0xde
11967 */
/**
 * @opcode 0xde
 *
 * FPU escape 0xde dispatcher.  Register mode: popping two-operand
 * arithmetic (FADDP etc.), with FCOMPP at the single encoding 0xd9.
 * Memory mode: ST0 op m16 integer.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (low 3 opcode bits + modrm) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    /* Only 0xde 0xd9 is defined in group /3. */
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12005
12006
12007/** Opcode 0xdf 11/0.
12008 * Undocument instruction, assumed to work like ffree + fincstp. */
12009FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
12010{
12011 IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
12012 IEM_MC_BEGIN(0, 0, 0, 0);
12013 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12014
12015 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12016 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12017
12018 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12019 IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
12020 IEM_MC_FPU_STACK_INC_TOP();
12021 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
12022
12023 IEM_MC_ADVANCE_RIP_AND_FINISH();
12024 IEM_MC_END();
12025}
12026
12027
12028/** Opcode 0xdf 0xe0. */
12029FNIEMOP_DEF(iemOp_fnstsw_ax)
12030{
12031 IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
12032 IEM_MC_BEGIN(0, 1, 0, 0);
12033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12034 IEM_MC_LOCAL(uint16_t, u16Tmp);
12035 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12036 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
12037 IEM_MC_FETCH_FSW(u16Tmp);
12038 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
12039 IEM_MC_ADVANCE_RIP_AND_FINISH();
12040 IEM_MC_END();
12041}
12042
12043
12044/** Opcode 0xdf 11/5. */
12045FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
12046{
12047 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
12048 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
12049 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
12050 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
12051}
12052
12053
12054/** Opcode 0xdf 11/6. */
12055FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
12056{
12057 IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
12058 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
12059 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
12060 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
12061}
12062
12063
12064/** Opcode 0xdf !11/0. */
12065FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
12066{
12067 IEMOP_MNEMONIC(fild_m16i, "fild m16i");
12068
12069 IEM_MC_BEGIN(2, 3, 0, 0);
12070 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12071 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
12072 IEM_MC_LOCAL(int16_t, i16Val);
12073 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
12074 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);
12075
12076 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12077 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12078
12079 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12080 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12081 IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12082
12083 IEM_MC_PREPARE_FPU_USAGE();
12084 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
12085 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
12086 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12087 } IEM_MC_ELSE() {
12088 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12089 } IEM_MC_ENDIF();
12090 IEM_MC_ADVANCE_RIP_AND_FINISH();
12091
12092 IEM_MC_END();
12093}
12094
12095
12096/** Opcode 0xdf !11/1. */
12097FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
12098{
12099 IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
12100 IEM_MC_BEGIN(3, 2, 0, 0);
12101 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12102 IEM_MC_LOCAL(uint16_t, u16Fsw);
12103 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12104 IEM_MC_ARG(int16_t *, pi16Dst, 1);
12105 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
12106
12107 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12109 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12110 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12111
12112 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
12113 IEM_MC_PREPARE_FPU_USAGE();
12114 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
12115 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
12116 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
12117 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12118 } IEM_MC_ELSE() {
12119 IEM_MC_IF_FCW_IM() {
12120 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
12121 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
12122 } IEM_MC_ENDIF();
12123 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12124 } IEM_MC_ENDIF();
12125 IEM_MC_ADVANCE_RIP_AND_FINISH();
12126
12127 IEM_MC_END();
12128}
12129
12130
12131/** Opcode 0xdf !11/2. */
12132FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
12133{
12134 IEMOP_MNEMONIC(fist_m16i, "fist m16i");
12135 IEM_MC_BEGIN(3, 2, 0, 0);
12136 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12137 IEM_MC_LOCAL(uint16_t, u16Fsw);
12138 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12139 IEM_MC_ARG(int16_t *, pi16Dst, 1);
12140 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
12141
12142 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12144 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12145 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12146
12147 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
12148 IEM_MC_PREPARE_FPU_USAGE();
12149 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
12150 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
12151 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
12152 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12153 } IEM_MC_ELSE() {
12154 IEM_MC_IF_FCW_IM() {
12155 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
12156 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
12157 } IEM_MC_ENDIF();
12158 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12159 } IEM_MC_ENDIF();
12160 IEM_MC_ADVANCE_RIP_AND_FINISH();
12161
12162 IEM_MC_END();
12163}
12164
12165
12166/** Opcode 0xdf !11/3. */
12167FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
12168{
12169 IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
12170 IEM_MC_BEGIN(3, 2, 0, 0);
12171 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12172 IEM_MC_LOCAL(uint16_t, u16Fsw);
12173 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12174 IEM_MC_ARG(int16_t *, pi16Dst, 1);
12175 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
12176
12177 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12179 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12180 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12181
12182 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
12183 IEM_MC_PREPARE_FPU_USAGE();
12184 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
12185 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
12186 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
12187 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12188 } IEM_MC_ELSE() {
12189 IEM_MC_IF_FCW_IM() {
12190 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
12191 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
12192 } IEM_MC_ENDIF();
12193 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12194 } IEM_MC_ENDIF();
12195 IEM_MC_ADVANCE_RIP_AND_FINISH();
12196
12197 IEM_MC_END();
12198}
12199
12200
12201/** Opcode 0xdf !11/4. */
12202FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
12203{
12204 IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");
12205
12206 IEM_MC_BEGIN(2, 3, 0, 0);
12207 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12208 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
12209 IEM_MC_LOCAL(RTPBCD80U, d80Val);
12210 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
12211 IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);
12212
12213 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12215
12216 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12217 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12218 IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12219
12220 IEM_MC_PREPARE_FPU_USAGE();
12221 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
12222 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
12223 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12224 } IEM_MC_ELSE() {
12225 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12226 } IEM_MC_ENDIF();
12227 IEM_MC_ADVANCE_RIP_AND_FINISH();
12228
12229 IEM_MC_END();
12230}
12231
12232
12233/** Opcode 0xdf !11/5. */
12234FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
12235{
12236 IEMOP_MNEMONIC(fild_m64i, "fild m64i");
12237
12238 IEM_MC_BEGIN(2, 3, 0, 0);
12239 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12240 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
12241 IEM_MC_LOCAL(int64_t, i64Val);
12242 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
12243 IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);
12244
12245 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12247
12248 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12249 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12250 IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12251
12252 IEM_MC_PREPARE_FPU_USAGE();
12253 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
12254 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
12255 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12256 } IEM_MC_ELSE() {
12257 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12258 } IEM_MC_ENDIF();
12259 IEM_MC_ADVANCE_RIP_AND_FINISH();
12260
12261 IEM_MC_END();
12262}
12263
12264
12265/** Opcode 0xdf !11/6. */
12266FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
12267{
12268 IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
12269 IEM_MC_BEGIN(3, 2, 0, 0);
12270 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12271 IEM_MC_LOCAL(uint16_t, u16Fsw);
12272 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12273 IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
12274 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
12275
12276 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12277 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12278 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12279 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12280
12281 IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
12282 IEM_MC_PREPARE_FPU_USAGE();
12283 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
12284 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
12285 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
12286 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12287 } IEM_MC_ELSE() {
12288 IEM_MC_IF_FCW_IM() {
12289 IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
12290 IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
12291 } IEM_MC_ENDIF();
12292 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12293 } IEM_MC_ENDIF();
12294 IEM_MC_ADVANCE_RIP_AND_FINISH();
12295
12296 IEM_MC_END();
12297}
12298
12299
/** Opcode 0xdf !11/7. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    /* FISTP m64int: store ST(0) to memory as a 64-bit signed integer and pop. */
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before the FPU state is touched. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: store the integer indefinite value (INT64_MIN) when IM is
           masked, then report stack underflow and pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12333
12334
12335/**
12336 * @opcode 0xdf
12337 */
12338FNIEMOP_DEF(iemOp_EscF7)
12339{
12340 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12341 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
12342 if (IEM_IS_MODRM_REG_MODE(bRm))
12343 {
12344 switch (IEM_GET_MODRM_REG_8(bRm))
12345 {
12346 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
12347 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
12348 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
12349 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
12350 case 4: if (bRm == 0xe0)
12351 return FNIEMOP_CALL(iemOp_fnstsw_ax);
12352 IEMOP_RAISE_INVALID_OPCODE_RET();
12353 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
12354 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
12355 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
12356 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12357 }
12358 }
12359 else
12360 {
12361 switch (IEM_GET_MODRM_REG_8(bRm))
12362 {
12363 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
12364 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
12365 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
12366 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
12367 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
12368 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
12369 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
12370 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
12371 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12372 }
12373 }
12374}
12375
12376
12377/**
12378 * @opcode 0xe0
12379 */
12380FNIEMOP_DEF(iemOp_loopne_Jb)
12381{
12382 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
12383 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12384 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12385
12386 switch (pVCpu->iem.s.enmEffAddrMode)
12387 {
12388 case IEMMODE_16BIT:
12389 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12390 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12391 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
12392 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12393 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12394 } IEM_MC_ELSE() {
12395 IEM_MC_ADVANCE_RIP_AND_FINISH();
12396 } IEM_MC_ENDIF();
12397 IEM_MC_END();
12398 break;
12399
12400 case IEMMODE_32BIT:
12401 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12403 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
12404 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12405 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12406 } IEM_MC_ELSE() {
12407 IEM_MC_ADVANCE_RIP_AND_FINISH();
12408 } IEM_MC_ENDIF();
12409 IEM_MC_END();
12410 break;
12411
12412 case IEMMODE_64BIT:
12413 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12415 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
12416 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12417 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12418 } IEM_MC_ELSE() {
12419 IEM_MC_ADVANCE_RIP_AND_FINISH();
12420 } IEM_MC_ENDIF();
12421 IEM_MC_END();
12422 break;
12423
12424 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12425 }
12426}
12427
12428
12429/**
12430 * @opcode 0xe1
12431 */
12432FNIEMOP_DEF(iemOp_loope_Jb)
12433{
12434 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
12435 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12436 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12437
12438 switch (pVCpu->iem.s.enmEffAddrMode)
12439 {
12440 case IEMMODE_16BIT:
12441 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12443 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
12444 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
12445 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12446 } IEM_MC_ELSE() {
12447 IEM_MC_ADVANCE_RIP_AND_FINISH();
12448 } IEM_MC_ENDIF();
12449 IEM_MC_END();
12450 break;
12451
12452 case IEMMODE_32BIT:
12453 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12455 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
12456 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
12457 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12458 } IEM_MC_ELSE() {
12459 IEM_MC_ADVANCE_RIP_AND_FINISH();
12460 } IEM_MC_ENDIF();
12461 IEM_MC_END();
12462 break;
12463
12464 case IEMMODE_64BIT:
12465 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12466 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12467 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
12468 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
12469 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12470 } IEM_MC_ELSE() {
12471 IEM_MC_ADVANCE_RIP_AND_FINISH();
12472 } IEM_MC_ENDIF();
12473 IEM_MC_END();
12474 break;
12475
12476 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12477 }
12478}
12479
12480
12481/**
12482 * @opcode 0xe2
12483 */
12484FNIEMOP_DEF(iemOp_loop_Jb)
12485{
12486 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
12487 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12488 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12489
12490 /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
12491 * using the 32-bit operand size override. How can that be restarted? See
12492 * weird pseudo code in intel manual. */
12493
12494 /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
12495 * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
12496 * the loop causes guest crashes, but when logging it's nice to skip a few million
12497 * lines of useless output. */
12498#if defined(LOG_ENABLED)
12499 if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
12500 switch (pVCpu->iem.s.enmEffAddrMode)
12501 {
12502 case IEMMODE_16BIT:
12503 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12505 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
12506 IEM_MC_ADVANCE_RIP_AND_FINISH();
12507 IEM_MC_END();
12508 break;
12509
12510 case IEMMODE_32BIT:
12511 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12513 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
12514 IEM_MC_ADVANCE_RIP_AND_FINISH();
12515 IEM_MC_END();
12516 break;
12517
12518 case IEMMODE_64BIT:
12519 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12521 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
12522 IEM_MC_ADVANCE_RIP_AND_FINISH();
12523 IEM_MC_END();
12524 break;
12525
12526 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12527 }
12528#endif
12529
12530 switch (pVCpu->iem.s.enmEffAddrMode)
12531 {
12532 case IEMMODE_16BIT:
12533 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12534 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12535 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
12536 IEM_MC_IF_CX_IS_NZ() {
12537 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12538 } IEM_MC_ELSE() {
12539 IEM_MC_ADVANCE_RIP_AND_FINISH();
12540 } IEM_MC_ENDIF();
12541 IEM_MC_END();
12542 break;
12543
12544 case IEMMODE_32BIT:
12545 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12547 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
12548 IEM_MC_IF_ECX_IS_NZ() {
12549 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12550 } IEM_MC_ELSE() {
12551 IEM_MC_ADVANCE_RIP_AND_FINISH();
12552 } IEM_MC_ENDIF();
12553 IEM_MC_END();
12554 break;
12555
12556 case IEMMODE_64BIT:
12557 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12559 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
12560 IEM_MC_IF_RCX_IS_NZ() {
12561 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12562 } IEM_MC_ELSE() {
12563 IEM_MC_ADVANCE_RIP_AND_FINISH();
12564 } IEM_MC_ENDIF();
12565 IEM_MC_END();
12566 break;
12567
12568 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12569 }
12570}
12571
12572
12573/**
12574 * @opcode 0xe3
12575 */
12576FNIEMOP_DEF(iemOp_jecxz_Jb)
12577{
12578 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
12579 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12580 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12581
12582 switch (pVCpu->iem.s.enmEffAddrMode)
12583 {
12584 case IEMMODE_16BIT:
12585 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12587 IEM_MC_IF_CX_IS_NZ() {
12588 IEM_MC_ADVANCE_RIP_AND_FINISH();
12589 } IEM_MC_ELSE() {
12590 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12591 } IEM_MC_ENDIF();
12592 IEM_MC_END();
12593 break;
12594
12595 case IEMMODE_32BIT:
12596 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12598 IEM_MC_IF_ECX_IS_NZ() {
12599 IEM_MC_ADVANCE_RIP_AND_FINISH();
12600 } IEM_MC_ELSE() {
12601 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12602 } IEM_MC_ENDIF();
12603 IEM_MC_END();
12604 break;
12605
12606 case IEMMODE_64BIT:
12607 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12609 IEM_MC_IF_RCX_IS_NZ() {
12610 IEM_MC_ADVANCE_RIP_AND_FINISH();
12611 } IEM_MC_ELSE() {
12612 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12613 } IEM_MC_ENDIF();
12614 IEM_MC_END();
12615 break;
12616
12617 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12618 }
12619}
12620
12621
/** Opcode 0xe4 */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    /* IN AL,imm8: byte port read, deferred to iemCImpl_in.  Only AL is
       written, hence the shadow flush mask covering just xAX. */
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12631
12632
/** Opcode 0xe5 */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    /* IN AX/EAX,imm8: word or dword port read depending on the effective
       operand size.  Only xAX is written (shadow flush mask). */
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12643
12644
/** Opcode 0xe6 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    /* OUT imm8,AL: byte port write; no guest GPRs are modified, so the
       shadow flush mask is zero. */
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12654
12655
/** Opcode 0xe7 */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    /* OUT imm8,AX/EAX: word or dword port write depending on the effective
       operand size; no GPRs are modified. */
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12666
12667
12668/**
12669 * @opcode 0xe8
12670 */
12671FNIEMOP_DEF(iemOp_call_Jv)
12672{
12673 IEMOP_MNEMONIC(call_Jv, "call Jv");
12674 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12675 switch (pVCpu->iem.s.enmEffOpSize)
12676 {
12677 case IEMMODE_16BIT:
12678 {
12679 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12680 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
12681 iemCImpl_call_rel_16, (int16_t)u16Imm);
12682 }
12683
12684 case IEMMODE_32BIT:
12685 {
12686 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12687 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
12688 iemCImpl_call_rel_32, (int32_t)u32Imm);
12689 }
12690
12691 case IEMMODE_64BIT:
12692 {
12693 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
12694 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
12695 iemCImpl_call_rel_64, u64Imm);
12696 }
12697
12698 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12699 }
12700}
12701
12702
12703/**
12704 * @opcode 0xe9
12705 */
12706FNIEMOP_DEF(iemOp_jmp_Jv)
12707{
12708 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
12709 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12710 switch (pVCpu->iem.s.enmEffOpSize)
12711 {
12712 case IEMMODE_16BIT:
12713 IEM_MC_BEGIN(0, 0, 0, 0);
12714 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
12715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12716 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
12717 IEM_MC_END();
12718 break;
12719
12720 case IEMMODE_64BIT:
12721 case IEMMODE_32BIT:
12722 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12723 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
12724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12725 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
12726 IEM_MC_END();
12727 break;
12728
12729 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12730 }
12731}
12732
12733
12734/**
12735 * @opcode 0xea
12736 */
12737FNIEMOP_DEF(iemOp_jmp_Ap)
12738{
12739 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
12740 IEMOP_HLP_NO_64BIT();
12741
12742 /* Decode the far pointer address and pass it on to the far call C implementation. */
12743 uint32_t off32Seg;
12744 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
12745 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
12746 else
12747 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
12748 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
12749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12750 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
12751 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
12752 iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
12753 /** @todo make task-switches, ring-switches, ++ return non-zero status */
12754}
12755
12756
12757/**
12758 * @opcode 0xeb
12759 */
12760FNIEMOP_DEF(iemOp_jmp_Jb)
12761{
12762 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
12763 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12764 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12765
12766 IEM_MC_BEGIN(0, 0, 0, 0);
12767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12768 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12769 IEM_MC_END();
12770}
12771
12772
/** Opcode 0xec */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    /* IN AL,DX: byte port read from the port in DX; only AL (xAX) is
       written, hence the single-register shadow flush mask. */
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12782
12783
/** Opcode 0xed */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    /* IN AX/EAX,DX: word or dword port read depending on the effective
       operand size; only xAX is written. */
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12794
12795
/** Opcode 0xee */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    /* OUT DX,AL: byte port write to the port in DX; no GPRs modified. */
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12804
12805
/** Opcode 0xef */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    /* OUT DX,AX/EAX: word or dword port write depending on the effective
       operand size; no GPRs modified. */
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12815
12816
12817/**
12818 * @opcode 0xf0
12819 */
12820FNIEMOP_DEF(iemOp_lock)
12821{
12822 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
12823 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12824 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
12825
12826 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12827 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12828}
12829
12830
12831/**
12832 * @opcode 0xf1
12833 */
12834FNIEMOP_DEF(iemOp_int1)
12835{
12836 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
12837 /** @todo Does not generate \#UD on 286, or so they say... Was allegedly a
12838 * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
12839 * LOADALL memo. Needs some testing. */
12840 IEMOP_HLP_MIN_386();
12841 /** @todo testcase! */
12842 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
12843 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
12844 iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
12845}
12846
12847
12848/**
12849 * @opcode 0xf2
12850 */
12851FNIEMOP_DEF(iemOp_repne)
12852{
12853 /* This overrides any previous REPE prefix. */
12854 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
12855 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
12856 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
12857
12858 /* For the 4 entry opcode tables, REPNZ overrides any previous
12859 REPZ and operand size prefixes. */
12860 pVCpu->iem.s.idxPrefix = 3;
12861
12862 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12863 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12864}
12865
12866
12867/**
12868 * @opcode 0xf3
12869 */
12870FNIEMOP_DEF(iemOp_repe)
12871{
12872 /* This overrides any previous REPNE prefix. */
12873 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
12874 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
12875 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
12876
12877 /* For the 4 entry opcode tables, REPNZ overrides any previous
12878 REPNZ and operand size prefixes. */
12879 pVCpu->iem.s.idxPrefix = 2;
12880
12881 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12882 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12883}
12884
12885
12886/**
12887 * @opcode 0xf4
12888 */
12889FNIEMOP_DEF(iemOp_hlt)
12890{
12891 IEMOP_MNEMONIC(hlt, "hlt");
12892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12893 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_hlt);
12894}
12895
12896
12897/**
12898 * @opcode 0xf5
12899 */
12900FNIEMOP_DEF(iemOp_cmc)
12901{
12902 IEMOP_MNEMONIC(cmc, "cmc");
12903 IEM_MC_BEGIN(0, 0, 0, 0);
12904 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12905 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
12906 IEM_MC_ADVANCE_RIP_AND_FINISH();
12907 IEM_MC_END();
12908}
12909
12910
12911/**
12912 * Body for of 'inc/dec/not/neg Eb'.
12913 */
12914#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
12915 if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
12916 { \
12917 /* register access */ \
12918 IEM_MC_BEGIN(2, 0, 0, 0); \
12919 IEMOP_HLP_DONE_DECODING(); \
12920 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12921 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
12922 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
12923 IEM_MC_REF_EFLAGS(pEFlags); \
12924 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
12925 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12926 IEM_MC_END(); \
12927 } \
12928 else \
12929 { \
12930 /* memory access. */ \
12931 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
12932 { \
12933 IEM_MC_BEGIN(2, 2, 0, 0); \
12934 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12935 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
12936 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12937 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12938 \
12939 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
12940 IEMOP_HLP_DONE_DECODING(); \
12941 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12942 IEM_MC_FETCH_EFLAGS(EFlags); \
12943 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
12944 \
12945 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
12946 IEM_MC_COMMIT_EFLAGS(EFlags); \
12947 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12948 IEM_MC_END(); \
12949 } \
12950 else \
12951 { \
12952 IEM_MC_BEGIN(2, 2, 0, 0); \
12953 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12954 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
12955 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12956 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12957 \
12958 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
12959 IEMOP_HLP_DONE_DECODING(); \
12960 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12961 IEM_MC_FETCH_EFLAGS(EFlags); \
12962 IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
12963 \
12964 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
12965 IEM_MC_COMMIT_EFLAGS(EFlags); \
12966 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12967 IEM_MC_END(); \
12968 } \
12969 } \
12970 (void)0
12971
12972
12973/**
12974 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
12975 */
12976#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
12977 if (IEM_IS_MODRM_REG_MODE(bRm)) \
12978 { \
12979 /* \
12980 * Register target \
12981 */ \
12982 switch (pVCpu->iem.s.enmEffOpSize) \
12983 { \
12984 case IEMMODE_16BIT: \
12985 IEM_MC_BEGIN(2, 0, 0, 0); \
12986 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12987 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
12988 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
12989 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
12990 IEM_MC_REF_EFLAGS(pEFlags); \
12991 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
12992 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12993 IEM_MC_END(); \
12994 break; \
12995 \
12996 case IEMMODE_32BIT: \
12997 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0); \
12998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12999 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
13000 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13001 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13002 IEM_MC_REF_EFLAGS(pEFlags); \
13003 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
13004 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
13005 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13006 IEM_MC_END(); \
13007 break; \
13008 \
13009 case IEMMODE_64BIT: \
13010 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0); \
13011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13012 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
13013 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13014 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13015 IEM_MC_REF_EFLAGS(pEFlags); \
13016 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
13017 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13018 IEM_MC_END(); \
13019 break; \
13020 \
13021 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13022 } \
13023 } \
13024 else \
13025 { \
13026 /* \
13027 * Memory target. \
13028 */ \
13029 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
13030 { \
13031 switch (pVCpu->iem.s.enmEffOpSize) \
13032 { \
13033 case IEMMODE_16BIT: \
13034 IEM_MC_BEGIN(2, 3, 0, 0); \
13035 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
13036 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13037 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13038 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13039 \
13040 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13042 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13043 IEM_MC_FETCH_EFLAGS(EFlags); \
13044 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
13045 \
13046 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
13047 IEM_MC_COMMIT_EFLAGS(EFlags); \
13048 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13049 IEM_MC_END(); \
13050 break; \
13051 \
13052 case IEMMODE_32BIT: \
13053 IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
13054 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
13055 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13056 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13057 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13058 \
13059 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13061 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13062 IEM_MC_FETCH_EFLAGS(EFlags); \
13063 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
13064 \
13065 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
13066 IEM_MC_COMMIT_EFLAGS(EFlags); \
13067 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13068 IEM_MC_END(); \
13069 break; \
13070 \
13071 case IEMMODE_64BIT: \
13072 IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
13073 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
13074 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13075 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13076 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13077 \
13078 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13080 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13081 IEM_MC_FETCH_EFLAGS(EFlags); \
13082 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
13083 \
13084 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
13085 IEM_MC_COMMIT_EFLAGS(EFlags); \
13086 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13087 IEM_MC_END(); \
13088 break; \
13089 \
13090 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13091 } \
13092 } \
13093 else \
13094 { \
13095 (void)0
13096
/**
 * Locked-memory tail for IEMOP_BODY_UNARY_Ev; must directly follow that macro
 * as it closes the scopes the former leaves open.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
13162
13163
13164/**
13165 * @opmaps grp3_f6
13166 * @opcode /0
13167 * @todo also /1
13168 */
13169FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
13170{
13171 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
13172 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
13173
13174 if (IEM_IS_MODRM_REG_MODE(bRm))
13175 {
13176 /* register access */
13177 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13178 IEM_MC_BEGIN(3, 0, 0, 0);
13179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13180 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13181 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
13182 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13183 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
13184 IEM_MC_REF_EFLAGS(pEFlags);
13185 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
13186 IEM_MC_ADVANCE_RIP_AND_FINISH();
13187 IEM_MC_END();
13188 }
13189 else
13190 {
13191 /* memory access. */
13192 IEM_MC_BEGIN(3, 3, 0, 0);
13193 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13194 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13195
13196 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13198
13199 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
13200 IEM_MC_ARG(uint8_t const *, pu8Dst, 0);
13201 IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13202
13203 IEM_MC_ARG_CONST(uint8_t, u8Src, u8Imm, 1);
13204 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
13205 IEM_MC_FETCH_EFLAGS(EFlags);
13206 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
13207
13208 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo);
13209 IEM_MC_COMMIT_EFLAGS(EFlags);
13210 IEM_MC_ADVANCE_RIP_AND_FINISH();
13211 IEM_MC_END();
13212 }
13213}
13214
13215
/** Opcode 0xf6 /4, /5, /6 and /7. */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    /* Common worker for byte MUL/IMUL/DIV/IDIV: @a pfnU8 operates on AX and
       the r/m8 operand and returns non-zero to request \#DE (divide error). */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEM_MC_BEGIN(3, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc == 0: success; otherwise raise the divide error exception. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2, 0, 0);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
13266
13267
13268/** Opcode 0xf7 /4, /5, /6 and /7. */
13269FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
13270{
13271 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
13272
13273 if (IEM_IS_MODRM_REG_MODE(bRm))
13274 {
13275 /* register access */
13276 switch (pVCpu->iem.s.enmEffOpSize)
13277 {
13278 case IEMMODE_16BIT:
13279 IEM_MC_BEGIN(4, 1, 0, 0);
13280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13281 IEM_MC_ARG(uint16_t *, pu16AX, 0);
13282 IEM_MC_ARG(uint16_t *, pu16DX, 1);
13283 IEM_MC_ARG(uint16_t, u16Value, 2);
13284 IEM_MC_ARG(uint32_t *, pEFlags, 3);
13285 IEM_MC_LOCAL(int32_t, rc);
13286
13287 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
13288 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
13289 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
13290 IEM_MC_REF_EFLAGS(pEFlags);
13291 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
13292 IEM_MC_IF_LOCAL_IS_Z(rc) {
13293 IEM_MC_ADVANCE_RIP_AND_FINISH();
13294 } IEM_MC_ELSE() {
13295 IEM_MC_RAISE_DIVIDE_ERROR();
13296 } IEM_MC_ENDIF();
13297
13298 IEM_MC_END();
13299 break;
13300
13301 case IEMMODE_32BIT:
13302 IEM_MC_BEGIN(4, 1, IEM_MC_F_MIN_386, 0);
13303 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13304 IEM_MC_ARG(uint32_t *, pu32AX, 0);
13305 IEM_MC_ARG(uint32_t *, pu32DX, 1);
13306 IEM_MC_ARG(uint32_t, u32Value, 2);
13307 IEM_MC_ARG(uint32_t *, pEFlags, 3);
13308 IEM_MC_LOCAL(int32_t, rc);
13309
13310 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
13311 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
13312 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
13313 IEM_MC_REF_EFLAGS(pEFlags);
13314 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
13315 IEM_MC_IF_LOCAL_IS_Z(rc) {
13316 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
13317 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX);
13318 IEM_MC_ADVANCE_RIP_AND_FINISH();
13319 } IEM_MC_ELSE() {
13320 IEM_MC_RAISE_DIVIDE_ERROR();
13321 } IEM_MC_ENDIF();
13322
13323 IEM_MC_END();
13324 break;
13325
13326 case IEMMODE_64BIT:
13327 IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT, 0);
13328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13329 IEM_MC_ARG(uint64_t *, pu64AX, 0);
13330 IEM_MC_ARG(uint64_t *, pu64DX, 1);
13331 IEM_MC_ARG(uint64_t, u64Value, 2);
13332 IEM_MC_ARG(uint32_t *, pEFlags, 3);
13333 IEM_MC_LOCAL(int32_t, rc);
13334
13335 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
13336 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
13337 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
13338 IEM_MC_REF_EFLAGS(pEFlags);
13339 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
13340 IEM_MC_IF_LOCAL_IS_Z(rc) {
13341 IEM_MC_ADVANCE_RIP_AND_FINISH();
13342 } IEM_MC_ELSE() {
13343 IEM_MC_RAISE_DIVIDE_ERROR();
13344 } IEM_MC_ENDIF();
13345
13346 IEM_MC_END();
13347 break;
13348
13349 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13350 }
13351 }
13352 else
13353 {
13354 /* memory access. */
13355 switch (pVCpu->iem.s.enmEffOpSize)
13356 {
13357 case IEMMODE_16BIT:
13358 IEM_MC_BEGIN(4, 2, 0, 0);
13359 IEM_MC_ARG(uint16_t *, pu16AX, 0);
13360 IEM_MC_ARG(uint16_t *, pu16DX, 1);
13361 IEM_MC_ARG(uint16_t, u16Value, 2);
13362 IEM_MC_ARG(uint32_t *, pEFlags, 3);
13363 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13364 IEM_MC_LOCAL(int32_t, rc);
13365
13366 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13367 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13368 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13369 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
13370 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
13371 IEM_MC_REF_EFLAGS(pEFlags);
13372 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
13373 IEM_MC_IF_LOCAL_IS_Z(rc) {
13374 IEM_MC_ADVANCE_RIP_AND_FINISH();
13375 } IEM_MC_ELSE() {
13376 IEM_MC_RAISE_DIVIDE_ERROR();
13377 } IEM_MC_ENDIF();
13378
13379 IEM_MC_END();
13380 break;
13381
13382 case IEMMODE_32BIT:
13383 IEM_MC_BEGIN(4, 2, IEM_MC_F_MIN_386, 0);
13384 IEM_MC_ARG(uint32_t *, pu32AX, 0);
13385 IEM_MC_ARG(uint32_t *, pu32DX, 1);
13386 IEM_MC_ARG(uint32_t, u32Value, 2);
13387 IEM_MC_ARG(uint32_t *, pEFlags, 3);
13388 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13389 IEM_MC_LOCAL(int32_t, rc);
13390
13391 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13392 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13393 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13394 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
13395 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
13396 IEM_MC_REF_EFLAGS(pEFlags);
13397 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
13398 IEM_MC_IF_LOCAL_IS_Z(rc) {
13399 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
13400 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX);
13401 IEM_MC_ADVANCE_RIP_AND_FINISH();
13402 } IEM_MC_ELSE() {
13403 IEM_MC_RAISE_DIVIDE_ERROR();
13404 } IEM_MC_ENDIF();
13405
13406 IEM_MC_END();
13407 break;
13408
13409 case IEMMODE_64BIT:
13410 IEM_MC_BEGIN(4, 2, IEM_MC_F_64BIT, 0);
13411 IEM_MC_ARG(uint64_t *, pu64AX, 0);
13412 IEM_MC_ARG(uint64_t *, pu64DX, 1);
13413 IEM_MC_ARG(uint64_t, u64Value, 2);
13414 IEM_MC_ARG(uint32_t *, pEFlags, 3);
13415 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13416 IEM_MC_LOCAL(int32_t, rc);
13417
13418 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13420 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13421 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
13422 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
13423 IEM_MC_REF_EFLAGS(pEFlags);
13424 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
13425 IEM_MC_IF_LOCAL_IS_Z(rc) {
13426 IEM_MC_ADVANCE_RIP_AND_FINISH();
13427 } IEM_MC_ELSE() {
13428 IEM_MC_RAISE_DIVIDE_ERROR();
13429 } IEM_MC_ENDIF();
13430
13431 IEM_MC_END();
13432 break;
13433
13434 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13435 }
13436 }
13437}
13438
13439
13440/**
13441 * @opmaps grp3_f6
13442 * @opcode /2
13443 */
/**
 * @opmaps grp3_f6
 * @opcode /2
 *
 * NOT Eb - byte-sized one's complement; handled by the common unary body
 * (plain + LOCK-prefixed worker pair).
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Eb, "not Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
}
13449
13450
13451/**
13452 * @opmaps grp3_f6
13453 * @opcode /3
13454 */
13455FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
13456{
13457 IEMOP_MNEMONIC(net_Eb, "neg Eb");
13458 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
13459}
13460
13461
13462/**
13463 * @opcode 0xf6
13464 */
/**
 * @opcode 0xf6
 *
 * Group 3 dispatcher for byte operands: routes on the ModR/M reg field.
 * Note that /1 is the undocumented alias of /0 (TEST Eb,Ib).
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm); /* undocumented TEST alias */
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13493
13494
13495/** Opcode 0xf7 /0. */
/**
 * Opcode 0xf7 /0.
 *
 * TEST Ev,Iv - ANDs the operand with an immediate and updates EFLAGS without
 * writing back a result; the memory operand is therefore mapped read-only.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                /* 64-bit form takes a sign-extended 32-bit immediate. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* The last argument is the number of immediate bytes still to come. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint16_t const *, pu16Dst, 0);
                IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint32_t const *, pu32Dst, 0);
                IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Still 4 immediate bytes: imm32 sign-extended to 64 bits. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);

                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_ARG(uint64_t const *, pu64Dst, 0);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13630
13631
13632/** Opcode 0xf7 /2. */
/**
 * Opcode 0xf7 /2.
 *
 * NOT Ev - one's complement.  The first body macro handles the unlocked
 * variants (and returns), the second the LOCK-prefixed memory variants.
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_not_u16, iemAImpl_not_u32, iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
13639
13640
13641/** Opcode 0xf7 /3. */
/**
 * Opcode 0xf7 /3.
 *
 * NEG Ev - two's complement negation.  The first body macro handles the
 * unlocked variants (and returns), the second the LOCK-prefixed ones.
 */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_neg_u16, iemAImpl_neg_u32, iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
13648
13649
13650/**
13651 * @opcode 0xf7
13652 */
/**
 * @opcode 0xf7
 *
 * Group 3 dispatcher for word/dword/qword operands: routes on the ModR/M reg
 * field.  Note that /1 is the undocumented alias of /0 (TEST Ev,Iv).
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm); /* undocumented TEST alias */
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13681
13682
13683/**
13684 * @opcode 0xf8
13685 */
/**
 * @opcode 0xf8
 *
 * CLC - clear the carry flag.
 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13695
13696
13697/**
13698 * @opcode 0xf9
13699 */
/**
 * @opcode 0xf9
 *
 * STC - set the carry flag.
 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13709
13710
13711/**
13712 * @opcode 0xfa
13713 */
/**
 * @opcode 0xfa
 *
 * CLI - clear the interrupt flag.  Deferred to a C implementation since it
 * may fault (IOPL/VME) and can cause a VM-exit; IRQs are checked before it
 * runs so a pending interrupt is not lost.
 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, 0, iemCImpl_cli);
}
13720
13721
/**
 * @opcode 0xfb
 *
 * STI - set the interrupt flag.  Deferred to a C implementation; interrupts
 * are inhibited for the following instruction (shadow) and pending IRQs are
 * checked after the instruction completes.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET( IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
                                | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, 0, iemCImpl_sti);
}
13729
13730
13731/**
13732 * @opcode 0xfc
13733 */
/**
 * @opcode 0xfc
 *
 * CLD - clear the direction flag.
 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13743
13744
13745/**
13746 * @opcode 0xfd
13747 */
/**
 * @opcode 0xfd
 *
 * STD - set the direction flag.
 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13757
13758
13759/**
13760 * @opmaps grp4
13761 * @opcode /0
13762 */
/**
 * @opmaps grp4
 * @opcode /0
 *
 * INC Eb - handled by the common unary body (plain + LOCK-prefixed worker pair).
 */
FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Eb, "inc Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
}
13768
13769
13770/**
13771 * @opmaps grp4
13772 * @opcode /1
13773 */
/**
 * @opmaps grp4
 * @opcode /1
 *
 * DEC Eb - handled by the common unary body (plain + LOCK-prefixed worker pair).
 */
FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Eb, "dec Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
}
13779
13780
13781/**
13782 * @opcode 0xfe
13783 */
/**
 * @opcode 0xfe
 *
 * Group 4 dispatcher: only /0 (INC Eb) and /1 (DEC Eb) are defined; all
 * other reg-field values raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
        default:
            /** @todo is the eff-addr decoded? */
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}
13797
13798/** Opcode 0xff /0. */
/**
 * Opcode 0xff /0.
 *
 * INC Ev.  The first body macro handles the unlocked variants (and returns),
 * the second the LOCK-prefixed memory variants.
 */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_inc_u16, iemAImpl_inc_u32, iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
13805
13806
13807/** Opcode 0xff /1. */
/**
 * Opcode 0xff /1.
 *
 * DEC Ev.  The first body macro handles the unlocked variants (and returns),
 * the second the LOCK-prefixed memory variants.
 */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_dec_u16, iemAImpl_dec_u32, iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
13814
13815
13816/**
13817 * Opcode 0xff /2.
13818 * @param bRm The RM byte.
13819 */
/**
 * Opcode 0xff /2.
 *
 * CALL Ev - near indirect call; the target RIP comes from a register or a
 * memory operand.  Deferred to a C implementation for the stack push and
 * branch handling.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1, 0, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13902
/**
 * Common body for the far indirect Grp5 encodings (0xff /3 CALLF and
 * 0xff /5 JMPF), which load a selector:offset far pointer from memory.
 * Register operands are invalid (\#UD).  The offset is read at the effective
 * address and the 16-bit selector just after it.
 *
 * @param   a_bRm           The ModR/M byte.
 * @param   a_fnCImpl       The C implementation doing the far branch.
 * @param   a_fCImplExtra   Extra IEM_CIMPL_F_XXX flags (e.g. BRANCH_STACK for callf).
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl, a_fCImplExtra) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1, 0, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint16_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint32_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint64_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE /* no gates */, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
13971
13972
13973/**
13974 * Opcode 0xff /3.
13975 * @param bRm The RM byte.
13976 */
/**
 * Opcode 0xff /3.
 *
 * CALLF Ep - far indirect call via a memory far pointer; shares its body
 * with JMPF (/5), adding the stack-push branch flag.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf, IEM_CIMPL_F_BRANCH_STACK);
}
13982
13983
13984/**
13985 * Opcode 0xff /4.
13986 * @param bRm The RM byte.
13987 */
/**
 * Opcode 0xff /4.
 *
 * JMP Ev - near indirect jump; the target RIP comes from a register or a
 * memory operand.  No stack interaction, so RIP is set directly.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14070
14071
14072/**
14073 * Opcode 0xff /5.
14074 * @param bRm The RM byte.
14075 */
/**
 * Opcode 0xff /5.
 *
 * JMPF Ep - far indirect jump via a memory far pointer; shares its body
 * with CALLF (/3), without the stack-push flag.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp, 0);
}
14081
14082
14083/**
14084 * Opcode 0xff /6.
14085 * @param bRm The RM byte.
14086 */
/**
 * Opcode 0xff /6.
 *
 * PUSH Ev.  Register operands are forwarded to the common general-register
 * push worker; only the memory-source forms are emitted here.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            /* 32-bit operand size does not exist in 64-bit mode for push. */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14138
14139
14140/**
14141 * @opcode 0xff
14142 */
/**
 * @opcode 0xff
 *
 * Group 5 dispatcher: routes on the ModR/M reg field; /7 raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
        case 1:
            return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    AssertFailedReturn(VERR_IEM_IPE_3);
}
14168
14169
14170
/**
 * The one byte opcode decoder function table.
 *
 * Indexed directly by the opcode byte (0x00..0xff); each entry is the
 * FNIEMOP decoder/emulator for that opcode.  Declared extern at the top of
 * this file so it can be forward referenced (not static for that reason).
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_in_eAX_DX,        iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
14238
14239
14240/** @} */
14241
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette