VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 100854

Last change on this file since 100854 was 100854, checked in by vboxsync, 20 months ago

VMM/IEM: In order to get rid of most impossible threaded functions, an IEM_MC_F_XXX parameter is added to IEM_MC_BEGIN that allows specifying if a block is only for 64-bit mode or 386+ or not for 286 or older. It can be extended with more info later, as needed. bugref:10369

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 488.3 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 100854 2023-08-11 01:29:04Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
59/**
60 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
61 * memory/register as the destination.
 *
 * Decodes the ModR/M byte and emits the register-destination and the
 * unlocked memory-destination paths.  It deliberately leaves an 'else {'
 * scope open for the LOCK-prefixed memory case, which MUST be closed by
 * one of the companion macros named below.
 *
 * @param   a_fnNormalU8    Worker for the plain (non-locked) byte operation,
 *                          invoked as (pu8Dst, u8Src, pEFlags).
62 *
63 * Used with IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED.
64 */
65#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8) \
66 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
67 \
68 /* \
69 * If rm is denoting a register, no more instruction bytes. \
70 */ \
71 if (IEM_IS_MODRM_REG_MODE(bRm)) \
72 { \
73 IEM_MC_BEGIN(3, 0, 0); \
74 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
75 IEM_MC_ARG(uint8_t, u8Src, 1); \
76 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
77 \
78 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
79 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
80 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
81 IEM_MC_REF_EFLAGS(pEFlags); \
82 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
83 \
84 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
85 IEM_MC_END(); \
86 } \
87 else \
88 { \
89 /* \
90 * We're accessing memory. \
91 * Note! We're putting the eflags on the stack here so we can commit them \
92 * after the memory. \
93 */ \
94 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
95 { \
96 IEM_MC_BEGIN(3, 3, 0); \
97 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
98 IEM_MC_ARG(uint8_t, u8Src, 1); \
99 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
100 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
101 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
102 \
103 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
104 IEMOP_HLP_DONE_DECODING(); \
105 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
106 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
107 IEM_MC_FETCH_EFLAGS(EFlags); \
108 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
109 \
110 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
111 IEM_MC_COMMIT_EFLAGS(EFlags); \
112 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
113 IEM_MC_END(); \
114 } \
115 else \
116 { /* Scope intentionally left open for the closing macro (LOCK case). */ \
117 (void)0
118
119/**
120 * Body for instructions like TEST & CMP, ++ with a byte memory/registers as
121 * operands.
 *
 * Same shape as IEMOP_BODY_BINARY_rm_r8_RW, but the r/m destination is only
 * read (mapped read-only, 'uint8_t const *'), as these instructions update
 * EFLAGS without writing the operand.  Leaves an 'else {' scope open for the
 * LOCK-prefix case, which the closing macro supplies.
 *
 * @param   a_fnNormalU8    Worker invoked as (pu8Dst, u8Src, pEFlags).
122 *
123 * Used with IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED.
124 */
125#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8) \
126 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
127 \
128 /* \
129 * If rm is denoting a register, no more instruction bytes. \
130 */ \
131 if (IEM_IS_MODRM_REG_MODE(bRm)) \
132 { \
133 IEM_MC_BEGIN(3, 0, 0); \
134 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
135 IEM_MC_ARG(uint8_t, u8Src, 1); \
136 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
137 \
138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
139 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
140 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
141 IEM_MC_REF_EFLAGS(pEFlags); \
142 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
143 \
144 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
145 IEM_MC_END(); \
146 } \
147 else \
148 { \
149 /* \
150 * We're accessing memory. \
151 * Note! We're putting the eflags on the stack here so we can commit them \
152 * after the memory. \
153 */ \
154 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
155 { \
156 IEM_MC_BEGIN(3, 3, 0); \
157 IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
158 IEM_MC_ARG(uint8_t, u8Src, 1); \
159 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
160 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
161 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
162 \
163 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
164 IEMOP_HLP_DONE_DECODING(); \
165 IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
166 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
167 IEM_MC_FETCH_EFLAGS(EFlags); \
168 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
169 \
170 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
171 IEM_MC_COMMIT_EFLAGS(EFlags); \
172 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
173 IEM_MC_END(); \
174 } \
175 else \
176 { /* Scope intentionally left open for the closing macro (LOCK case). */ \
177 (void)0
178
/**
 * Closing macro for IEMOP_BODY_BINARY_rm_r8_RW/RO when the instruction does
 * not support the LOCK prefix: raises the invalid-lock-prefix exception for
 * a locked memory operand and closes the two scopes left open by the
 * opening macro.
 */
179#define IEMOP_BODY_BINARY_rm_r8_NO_LOCK() \
180 IEMOP_HLP_DONE_DECODING(); \
181 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
182 } \
183 } \
184 (void)0
185
/**
 * Closing macro for IEMOP_BODY_BINARY_rm_r8_RW/RO handling the LOCK-prefixed
 * memory-destination case, then closing the two scopes left open by the
 * opening macro.  Relies on 'bRm' declared by the opening macro.
 *
 * @param   a_fnLockedU8    Atomic/locked worker, invoked as
 *                          (pu8Dst, u8Src, pEFlags).
 */
186#define IEMOP_BODY_BINARY_rm_r8_LOCKED(a_fnLockedU8) \
187 IEM_MC_BEGIN(3, 3, 0); \
188 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
189 IEM_MC_ARG(uint8_t, u8Src, 1); \
190 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
192 IEM_MC_LOCAL(uint8_t, bMapInfoDst); \
193 \
194 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
195 IEMOP_HLP_DONE_DECODING(); \
196 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bMapInfoDst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
197 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
198 IEM_MC_FETCH_EFLAGS(EFlags); \
199 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
200 \
201 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bMapInfoDst); \
202 IEM_MC_COMMIT_EFLAGS(EFlags); \
203 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
204 IEM_MC_END(); \
205 } \
206 } \
207 (void)0
208
209/**
210 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
211 * destination.
 *
 * Self-contained (closes all its scopes; no companion macro needed): the
 * destination is always a register, so no LOCK prefix is accepted and the
 * memory operand, if any, is only read.
 *
 * @param   a_fnNormalU8    Worker invoked as (pu8Dst, u8Src, pEFlags).
212 */
213#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
214 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
215 \
216 /* \
217 * If rm is denoting a register, no more instruction bytes. \
218 */ \
219 if (IEM_IS_MODRM_REG_MODE(bRm)) \
220 { \
221 IEM_MC_BEGIN(3, 0, 0); \
222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
223 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
224 IEM_MC_ARG(uint8_t, u8Src, 1); \
225 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
226 \
227 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
228 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
229 IEM_MC_REF_EFLAGS(pEFlags); \
230 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
231 \
232 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
233 IEM_MC_END(); \
234 } \
235 else \
236 { \
237 /* \
238 * We're accessing memory. \
239 */ \
240 IEM_MC_BEGIN(3, 1, 0); \
241 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
242 IEM_MC_ARG(uint8_t, u8Src, 1); \
243 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
244 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
245 \
246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
248 IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
249 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
250 IEM_MC_REF_EFLAGS(pEFlags); \
251 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
252 \
253 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
254 IEM_MC_END(); \
255 } \
256 (void)0
257
258
259/**
260 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
261 * memory/register as the destination.
 *
 * Switches on the effective operand size (16/32/64) for both the register
 * and the unlocked memory destination.  Deliberately leaves an 'else {'
 * scope open for the LOCK-prefixed case, which MUST be closed by
 * IEMOP_BODY_BINARY_rm_rv_LOCKED.
 *
 * @param   a_fnNormalU16   16-bit worker, invoked as (pu16Dst, u16Src, pEFlags).
 * @param   a_fnNormalU32   32-bit worker (high dword of the destination GPR
 *                          is cleared afterwards in the register case).
 * @param   a_fnNormalU64   64-bit worker.
262 */
263#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
264 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
265 \
266 /* \
267 * If rm is denoting a register, no more instruction bytes. \
268 */ \
269 if (IEM_IS_MODRM_REG_MODE(bRm)) \
270 { \
271 switch (pVCpu->iem.s.enmEffOpSize) \
272 { \
273 case IEMMODE_16BIT: \
274 IEM_MC_BEGIN(3, 0, 0); \
275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
276 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
277 IEM_MC_ARG(uint16_t, u16Src, 1); \
278 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
279 \
280 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
281 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
282 IEM_MC_REF_EFLAGS(pEFlags); \
283 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
284 \
285 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
286 IEM_MC_END(); \
287 break; \
288 \
289 case IEMMODE_32BIT: \
290 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
292 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
293 IEM_MC_ARG(uint32_t, u32Src, 1); \
294 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
295 \
296 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
297 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
298 IEM_MC_REF_EFLAGS(pEFlags); \
299 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
300 \
301 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
302 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
303 IEM_MC_END(); \
304 break; \
305 \
306 case IEMMODE_64BIT: \
307 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT); \
308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
309 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
310 IEM_MC_ARG(uint64_t, u64Src, 1); \
311 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
312 \
313 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
314 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
315 IEM_MC_REF_EFLAGS(pEFlags); \
316 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
317 \
318 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
319 IEM_MC_END(); \
320 break; \
321 \
322 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
323 } \
324 } \
325 else \
326 { \
327 /* \
328 * We're accessing memory. \
329 * Note! We're putting the eflags on the stack here so we can commit them \
330 * after the memory. \
331 */ \
332 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
333 { \
334 switch (pVCpu->iem.s.enmEffOpSize) \
335 { \
336 case IEMMODE_16BIT: \
337 IEM_MC_BEGIN(3, 3, 0); \
338 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
339 IEM_MC_ARG(uint16_t, u16Src, 1); \
340 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
342 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
343 \
344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
345 IEMOP_HLP_DONE_DECODING(); \
346 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
347 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
348 IEM_MC_FETCH_EFLAGS(EFlags); \
349 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
350 \
351 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
352 IEM_MC_COMMIT_EFLAGS(EFlags); \
353 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
354 IEM_MC_END(); \
355 break; \
356 \
357 case IEMMODE_32BIT: \
358 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
359 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
360 IEM_MC_ARG(uint32_t, u32Src, 1); \
361 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
362 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
363 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
364 \
365 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
366 IEMOP_HLP_DONE_DECODING(); \
367 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
368 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
369 IEM_MC_FETCH_EFLAGS(EFlags); \
370 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
371 \
372 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
373 IEM_MC_COMMIT_EFLAGS(EFlags); \
374 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
375 IEM_MC_END(); \
376 break; \
377 \
378 case IEMMODE_64BIT: \
379 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT); \
380 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
381 IEM_MC_ARG(uint64_t, u64Src, 1); \
382 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
383 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
384 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
385 \
386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
387 IEMOP_HLP_DONE_DECODING(); \
388 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
389 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
390 IEM_MC_FETCH_EFLAGS(EFlags); \
391 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
392 \
393 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
394 IEM_MC_COMMIT_EFLAGS(EFlags); \
395 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
396 IEM_MC_END(); \
397 break; \
398 \
399 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
400 } \
401 } \
402 else \
403 { /* Scope intentionally left open for IEMOP_BODY_BINARY_rm_rv_LOCKED. */ \
404 (void)0
405/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * Closing macro for IEMOP_BODY_BINARY_rm_rv_RW handling the LOCK-prefixed
 * memory-destination case for all three operand sizes, then closing the two
 * scopes left open by the opening macro.  Relies on 'bRm' declared there.
 *
 * @param   a_fnLockedU16   Atomic 16-bit worker, invoked as (pu16Dst, u16Src, pEFlags).
 * @param   a_fnLockedU32   Atomic 32-bit worker.
 * @param   a_fnLockedU64   Atomic 64-bit worker.
 */
406#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
407 switch (pVCpu->iem.s.enmEffOpSize) \
408 { \
409 case IEMMODE_16BIT: \
410 IEM_MC_BEGIN(3, 3, 0); \
411 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
412 IEM_MC_ARG(uint16_t, u16Src, 1); \
413 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
414 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
415 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
416 \
417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
418 IEMOP_HLP_DONE_DECODING(); \
419 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
420 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
421 IEM_MC_FETCH_EFLAGS(EFlags); \
422 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
423 \
424 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
425 IEM_MC_COMMIT_EFLAGS(EFlags); \
426 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
427 IEM_MC_END(); \
428 break; \
429 \
430 case IEMMODE_32BIT: \
431 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
432 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
433 IEM_MC_ARG(uint32_t, u32Src, 1); \
434 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
436 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
437 \
438 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
439 IEMOP_HLP_DONE_DECODING(); \
440 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
441 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
442 IEM_MC_FETCH_EFLAGS(EFlags); \
443 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
444 \
445 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
446 IEM_MC_COMMIT_EFLAGS(EFlags); \
447 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
448 IEM_MC_END(); \
449 break; \
450 \
451 case IEMMODE_64BIT: \
452 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT); \
453 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
454 IEM_MC_ARG(uint64_t, u64Src, 1); \
455 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
456 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
457 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
458 \
459 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
460 IEMOP_HLP_DONE_DECODING(); \
461 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
462 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
463 IEM_MC_FETCH_EFLAGS(EFlags); \
464 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
465 \
466 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
467 IEM_MC_COMMIT_EFLAGS(EFlags); \
468 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
469 IEM_MC_END(); \
470 break; \
471 \
472 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
473 } \
474 } \
475 } \
476 (void)0
477
478/**
479 * Body for read-only word/dword/qword instructions like TEST and CMP with
480 * memory/register as the destination.
 *
 * Self-contained (no companion macro): the memory destination is mapped
 * read-only ('const *') since these instructions only update EFLAGS, and a
 * LOCK prefix on the memory form raises the invalid-lock-prefix exception.
 *
 * @param   a_fnNormalU16   16-bit worker, invoked as (pu16Dst, u16Src, pEFlags).
 * @param   a_fnNormalU32   32-bit worker.
 * @param   a_fnNormalU64   64-bit worker.
481 */
482#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
483 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
484 \
485 /* \
486 * If rm is denoting a register, no more instruction bytes. \
487 */ \
488 if (IEM_IS_MODRM_REG_MODE(bRm)) \
489 { \
490 switch (pVCpu->iem.s.enmEffOpSize) \
491 { \
492 case IEMMODE_16BIT: \
493 IEM_MC_BEGIN(3, 0, 0); \
494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
495 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
496 IEM_MC_ARG(uint16_t, u16Src, 1); \
497 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
498 \
499 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
500 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
501 IEM_MC_REF_EFLAGS(pEFlags); \
502 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
503 \
504 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
505 IEM_MC_END(); \
506 break; \
507 \
508 case IEMMODE_32BIT: \
509 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
510 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
511 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
512 IEM_MC_ARG(uint32_t, u32Src, 1); \
513 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
514 \
515 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
516 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
517 IEM_MC_REF_EFLAGS(pEFlags); \
518 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
519 \
520 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
521 IEM_MC_END(); \
522 break; \
523 \
524 case IEMMODE_64BIT: \
525 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT); \
526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
527 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
528 IEM_MC_ARG(uint64_t, u64Src, 1); \
529 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
530 \
531 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
532 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
533 IEM_MC_REF_EFLAGS(pEFlags); \
534 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
535 \
536 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
537 IEM_MC_END(); \
538 break; \
539 \
540 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
541 } \
542 } \
543 else \
544 { \
545 /* \
546 * We're accessing memory. \
547 * Note! We're putting the eflags on the stack here so we can commit them \
548 * after the memory. \
549 */ \
550 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
551 { \
552 switch (pVCpu->iem.s.enmEffOpSize) \
553 { \
554 case IEMMODE_16BIT: \
555 IEM_MC_BEGIN(3, 3, 0); \
556 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
557 IEM_MC_ARG(uint16_t, u16Src, 1); \
558 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
560 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
561 \
562 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
563 IEMOP_HLP_DONE_DECODING(); \
564 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
565 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
566 IEM_MC_FETCH_EFLAGS(EFlags); \
567 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
568 \
569 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
570 IEM_MC_COMMIT_EFLAGS(EFlags); \
571 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
572 IEM_MC_END(); \
573 break; \
574 \
575 case IEMMODE_32BIT: \
576 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
577 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
578 IEM_MC_ARG(uint32_t, u32Src, 1); \
579 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
580 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
581 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
582 \
583 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
584 IEMOP_HLP_DONE_DECODING(); \
585 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
586 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
587 IEM_MC_FETCH_EFLAGS(EFlags); \
588 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
589 \
590 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
591 IEM_MC_COMMIT_EFLAGS(EFlags); \
592 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
593 IEM_MC_END(); \
594 break; \
595 \
596 case IEMMODE_64BIT: \
597 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT); \
598 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
599 IEM_MC_ARG(uint64_t, u64Src, 1); \
600 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
601 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
602 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
603 \
604 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
605 IEMOP_HLP_DONE_DECODING(); \
606 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
607 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
608 IEM_MC_FETCH_EFLAGS(EFlags); \
609 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
610 \
611 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
612 IEM_MC_COMMIT_EFLAGS(EFlags); \
613 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
614 IEM_MC_END(); \
615 break; \
616 \
617 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
618 } \
619 } \
620 else \
621 { \
622 IEMOP_HLP_DONE_DECODING(); \
623 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
624 } \
625 } \
626 (void)0
627
628
629/**
630 * Body for instructions like ADD, AND, OR, ++ with working on AL with
631 * a byte immediate.
 *
 * @param   a_fnNormalU8    Worker invoked as (pu8Dst, u8Src, pEFlags).
 *
 * Note!    Ends with IEM_MC_END() without a trailing semicolon; the
 *          invocation site supplies it (see e.g. iemOp_add_Al_Ib).
632 */
633#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
634 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
635 \
636 IEM_MC_BEGIN(3, 0, 0); \
637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
638 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
639 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1); \
640 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
641 \
642 IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
643 IEM_MC_REF_EFLAGS(pEFlags); \
644 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
645 \
646 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
647 IEM_MC_END()
648
649/**
650 * Body for instructions like ADD, AND, OR, ++ with working on
651 * AX/EAX/RAX with a word/dword immediate.
 *
 * The 64-bit case uses a sign-extended 32-bit immediate (Iz), matching the
 * IEM_OPCODE_GET_NEXT_S32_SX_U64 fetch below.
 *
 * @param   a_fnNormalU16       16-bit worker, invoked as (pu16Dst, u16Src, pEFlags).
 * @param   a_fnNormalU32       32-bit worker.
 * @param   a_fnNormalU64       64-bit worker.
 * @param   a_fModifiesDstReg   Non-zero when the destination register is
 *                              written, so the 32-bit case must clear the
 *                              high dword of RAX (zero for TEST/CMP-style
 *                              read-only users).
 *
 * NOTE(review): the case bodies end without 'break'; this appears to rely on
 * IEM_MC_ADVANCE_RIP_AND_FINISH()/IEM_MC_END() leaving the function for each
 * block -- confirm against the IEM_MC_* definitions in IEMMc.h.
652 */
653#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
654 switch (pVCpu->iem.s.enmEffOpSize) \
655 { \
656 case IEMMODE_16BIT: \
657 { \
658 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
659 \
660 IEM_MC_BEGIN(3, 0, 0); \
661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
662 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
663 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1); \
664 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
665 \
666 IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
667 IEM_MC_REF_EFLAGS(pEFlags); \
668 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
669 \
670 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
671 IEM_MC_END(); \
672 } \
673 \
674 case IEMMODE_32BIT: \
675 { \
676 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
677 \
678 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
680 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
681 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1); \
682 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
683 \
684 IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
685 IEM_MC_REF_EFLAGS(pEFlags); \
686 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
687 \
688 if (a_fModifiesDstReg) \
689 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
690 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
691 IEM_MC_END(); \
692 } \
693 \
694 case IEMMODE_64BIT: \
695 { \
696 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
697 \
698 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT); \
699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
700 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
701 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1); \
702 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
703 \
704 IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
705 IEM_MC_REF_EFLAGS(pEFlags); \
706 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
707 \
708 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
709 IEM_MC_END(); \
710 } \
711 \
712 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
713 } \
714 (void)0
715
716
717
718/* Instruction specification format - work in progress: */
719
720/**
721 * @opcode 0x00
722 * @opmnemonic add
723 * @op1 rm:Eb
724 * @op2 reg:Gb
725 * @opmaps one
726 * @openc ModR/M
727 * @opflmodify cf,pf,af,zf,sf,of
728 * @ophints harmless ignores_op_sizes
729 * @opstats add_Eb_Gb
730 * @opgroup og_gen_arith_bin
731 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
732 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
733 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
734 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
735 */
736FNIEMOP_DEF(iemOp_add_Eb_Gb)
737{
738 IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
 /* Opening macro covers the register/unlocked-memory forms; closing macro supplies the LOCK case. */
739 IEMOP_BODY_BINARY_rm_r8_RW( iemAImpl_add_u8);
740 IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_add_u8_locked);
741}
742
743
744/**
745 * @opcode 0x01
746 * @opgroup og_gen_arith_bin
747 * @opflmodify cf,pf,af,zf,sf,of
748 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
749 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
750 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
751 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
752 */
753FNIEMOP_DEF(iemOp_add_Ev_Gv)
754{
755 IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
 /* Opening macro covers the register/unlocked-memory forms; closing macro supplies the LOCK case. */
756 IEMOP_BODY_BINARY_rm_rv_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
757 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
758}
759
760
761/**
762 * @opcode 0x02
763 * @opgroup og_gen_arith_bin
764 * @opflmodify cf,pf,af,zf,sf,of
765 * @opcopytests iemOp_add_Eb_Gb
766 */
767FNIEMOP_DEF(iemOp_add_Gb_Eb)
768{
769 IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
 /* Register destination, so the memory operand is read-only and LOCK is rejected by the body. */
770 IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
771}
772
773
774/**
775 * @opcode 0x03
776 * @opgroup og_gen_arith_bin
777 * @opflmodify cf,pf,af,zf,sf,of
778 * @opcopytests iemOp_add_Ev_Gv
779 */
780FNIEMOP_DEF(iemOp_add_Gv_Ev)
781{
782 IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
 /* Last arg presumably flags that the destination register is modified -- defined elsewhere, confirm. */
783 IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
784}
785
786
787/**
788 * @opcode 0x04
789 * @opgroup og_gen_arith_bin
790 * @opflmodify cf,pf,af,zf,sf,of
791 * @opcopytests iemOp_add_Eb_Gb
792 */
793FNIEMOP_DEF(iemOp_add_Al_Ib)
794{
795 IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
 /* ADD AL, imm8 -- the body macro ends at IEM_MC_END(), hence the trailing semicolon here. */
796 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
797}
798
799
800/**
801 * @opcode 0x05
802 * @opgroup og_gen_arith_bin
803 * @opflmodify cf,pf,af,zf,sf,of
804 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
805 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
806 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
807 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
808 */
809FNIEMOP_DEF(iemOp_add_eAX_Iz)
810{
811 IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
 /* a_fModifiesDstReg=1: ADD writes rAX, so the 32-bit case clears the high dword. */
812 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
813}
814
815
816/**
817 * @opcode 0x06
818 * @opgroup og_stack_sreg
 *
 * PUSH ES: invalid in 64-bit mode (IEMOP_HLP_NO_64BIT); otherwise delegates
 * to the common segment-register push helper.
819 */
820FNIEMOP_DEF(iemOp_push_ES)
821{
822 IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
823 IEMOP_HLP_NO_64BIT();
824 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
825}
826
827
828/**
829 * @opcode 0x07
830 * @opgroup og_stack_sreg
 *
 * POP ES: invalid in 64-bit mode; defers to the C implementation
 * iemCImpl_pop_Sreg, flagged IEM_CIMPL_F_MODE since loading a segment
 * register can affect execution mode state.
831 */
832FNIEMOP_DEF(iemOp_pop_ES)
833{
834 IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
835 IEMOP_HLP_NO_64BIT();
836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
837 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
838}
839
840
841/**
842 * @opcode 0x08
843 * @opgroup og_gen_arith_bin
844 * @opflmodify cf,pf,af,zf,sf,of
845 * @opflundef af
846 * @opflclear of,cf
847 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
848 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
849 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
850 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
851 */
852FNIEMOP_DEF(iemOp_or_Eb_Gb)
853{
854 IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
 /* AF is architecturally undefined after OR; tell the verifier to ignore it. */
855 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
856 IEMOP_BODY_BINARY_rm_r8_RW( iemAImpl_or_u8);
857 IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_or_u8_locked);
858}
859
860
861/**
862 * @opcode 0x09
863 * @opgroup og_gen_arith_bin
864 * @opflmodify cf,pf,af,zf,sf,of
865 * @opflundef af
866 * @opflclear of,cf
867 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
868 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
869 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
870 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
871 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
872 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
873 */
874FNIEMOP_DEF(iemOp_or_Ev_Gv)
875{
876 IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
 /* AF is architecturally undefined after OR; tell the verifier to ignore it. */
877 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
878 IEMOP_BODY_BINARY_rm_rv_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
879 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
880}
881
882
883/**
884 * @opcode 0x0a
885 * @opgroup og_gen_arith_bin
886 * @opflmodify cf,pf,af,zf,sf,of
887 * @opflundef af
888 * @opflclear of,cf
889 * @opcopytests iemOp_or_Eb_Gb
890 */
891FNIEMOP_DEF(iemOp_or_Gb_Eb)
892{
893 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
 /* AF is architecturally undefined after OR; tell the verifier to ignore it. */
894 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
895 IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
896}
897
898
899/**
900 * @opcode 0x0b
901 * @opgroup og_gen_arith_bin
902 * @opflmodify cf,pf,af,zf,sf,of
903 * @opflundef af
904 * @opflclear of,cf
905 * @opcopytests iemOp_or_Ev_Gv
906 */
907FNIEMOP_DEF(iemOp_or_Gv_Ev)
908{
909 IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
 /* AF is architecturally undefined after OR; tell the verifier to ignore it. */
910 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
911 IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
912}
913
914
915/**
916 * @opcode 0x0c
917 * @opgroup og_gen_arith_bin
918 * @opflmodify cf,pf,af,zf,sf,of
919 * @opflundef af
920 * @opflclear of,cf
921 * @opcopytests iemOp_or_Eb_Gb
922 */
923FNIEMOP_DEF(iemOp_or_Al_Ib)
924{
925 IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
 /* AF is architecturally undefined after OR; tell the verifier to ignore it. */
926 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
927 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
928}
929
930
931/**
932 * @opcode 0x0d
933 * @opgroup og_gen_arith_bin
934 * @opflmodify cf,pf,af,zf,sf,of
935 * @opflundef af
936 * @opflclear of,cf
937 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
938 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
939 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
940 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
941 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
942 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
943 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
944 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX, imm16/32 (sign-extended to 64 bits for the u64 worker). */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
951
952
953/**
954 * @opcode 0x0e
955 * @opgroup og_stack_sreg
956 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS - invalid in 64-bit mode (raises #UD via IEMOP_HLP_NO_64BIT). */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
963
964
965/**
966 * @opcode 0x0f
967 * @opmnemonic EscTwo0f
968 * @openc two0f
969 * @opdisenum OP_2B_ESC
970 * @ophints harmless
971 * @opgroup og_escapes
972 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    /* On 286 and later 0x0f escapes to the two-byte opcode map; the map is
       indexed by opcode * 4 plus the current operand-size/repeat prefix group. */
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
1008
1009/**
1010 * @opcode 0x10
1011 * @opgroup og_gen_arith_bin
1012 * @opfltest cf
1013 * @opflmodify cf,pf,af,zf,sf,of
1014 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1015 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1016 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1017 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1018 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1019 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC r/m8, r8 - plain read-modify-write body plus the LOCK-prefixed variant. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_adc_u8_locked);
}
1026
1027
1028/**
1029 * @opcode 0x11
1030 * @opgroup og_gen_arith_bin
1031 * @opfltest cf
1032 * @opflmodify cf,pf,af,zf,sf,of
1033 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1034 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1035 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1036 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1037 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1038 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC r/m16/32/64, r - plain and LOCK-prefixed bodies, worker picked by operand size. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1045
1046
1047/**
1048 * @opcode 0x12
1049 * @opgroup og_gen_arith_bin
1050 * @opfltest cf
1051 * @opflmodify cf,pf,af,zf,sf,of
1052 * @opcopytests iemOp_adc_Eb_Gb
1053 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC r8, r/m8 (register destination, so no LOCK variant). */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
1059
1060
1061/**
1062 * @opcode 0x13
1063 * @opgroup og_gen_arith_bin
1064 * @opfltest cf
1065 * @opflmodify cf,pf,af,zf,sf,of
1066 * @opcopytests iemOp_adc_Ev_Gv
1067 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC r16/32/64, r/m. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1073
1074
1075/**
1076 * @opcode 0x14
1077 * @opgroup og_gen_arith_bin
1078 * @opfltest cf
1079 * @opflmodify cf,pf,af,zf,sf,of
1080 * @opcopytests iemOp_adc_Eb_Gb
1081 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
1087
1088
1089/**
1090 * @opcode 0x15
1091 * @opgroup og_gen_arith_bin
1092 * @opfltest cf
1093 * @opflmodify cf,pf,af,zf,sf,of
1094 * @opcopytests iemOp_adc_Ev_Gv
1095 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX, imm16/32. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1101
1102
/**
 * @opcode 0x16
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1112
1113
/**
 * @opcode 0x17
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS - invalid in 64-bit mode; deferred to C since loading SS may change
       execution mode (IEM_CIMPL_F_MODE) and inhibits interrupts for one insn. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1127
1128
1129/**
1130 * @opcode 0x18
1131 * @opgroup og_gen_arith_bin
1132 * @opfltest cf
1133 * @opflmodify cf,pf,af,zf,sf,of
1134 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB r/m8, r8 - plain and LOCK-prefixed bodies. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sbb_u8_locked);
}
1141
1142
1143/**
1144 * @opcode 0x19
1145 * @opgroup og_gen_arith_bin
1146 * @opfltest cf
1147 * @opflmodify cf,pf,af,zf,sf,of
1148 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB r/m16/32/64, r - plain and LOCK-prefixed bodies. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1155
1156
1157/**
1158 * @opcode 0x1a
1159 * @opgroup og_gen_arith_bin
1160 * @opfltest cf
1161 * @opflmodify cf,pf,af,zf,sf,of
1162 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB r8, r/m8. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
1168
1169
1170/**
1171 * @opcode 0x1b
1172 * @opgroup og_gen_arith_bin
1173 * @opfltest cf
1174 * @opflmodify cf,pf,af,zf,sf,of
1175 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB r16/32/64, r/m. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1181
1182
1183/**
1184 * @opcode 0x1c
1185 * @opgroup og_gen_arith_bin
1186 * @opfltest cf
1187 * @opflmodify cf,pf,af,zf,sf,of
1188 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
1194
1195
1196/**
1197 * @opcode 0x1d
1198 * @opgroup og_gen_arith_bin
1199 * @opfltest cf
1200 * @opflmodify cf,pf,af,zf,sf,of
1201 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX, imm16/32. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1207
1208
1209/**
1210 * @opcode 0x1e
1211 * @opgroup og_stack_sreg
1212 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1219
1220
1221/**
1222 * @opcode 0x1f
1223 * @opgroup og_stack_sreg
1224 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS - invalid in 64-bit mode; deferred to C (segment load may change mode flags). */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1232
1233
1234/**
1235 * @opcode 0x20
1236 * @opgroup og_gen_arith_bin
1237 * @opflmodify cf,pf,af,zf,sf,of
1238 * @opflundef af
1239 * @opflclear of,cf
1240 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND r/m8, r8 - plain and LOCK-prefixed bodies; AF undefined after AND. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_and_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_and_u8_locked);
}
1248
1249
1250/**
1251 * @opcode 0x21
1252 * @opgroup og_gen_arith_bin
1253 * @opflmodify cf,pf,af,zf,sf,of
1254 * @opflundef af
1255 * @opflclear of,cf
1256 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND r/m16/32/64, r - plain and LOCK-prefixed bodies. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1264
1265
1266/**
1267 * @opcode 0x22
1268 * @opgroup og_gen_arith_bin
1269 * @opflmodify cf,pf,af,zf,sf,of
1270 * @opflundef af
1271 * @opflclear of,cf
1272 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND r8, r/m8. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1279
1280
1281/**
1282 * @opcode 0x23
1283 * @opgroup og_gen_arith_bin
1284 * @opflmodify cf,pf,af,zf,sf,of
1285 * @opflundef af
1286 * @opflclear of,cf
1287 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND r16/32/64, r/m. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1294
1295
1296/**
1297 * @opcode 0x24
1298 * @opgroup og_gen_arith_bin
1299 * @opflmodify cf,pf,af,zf,sf,of
1300 * @opflundef af
1301 * @opflclear of,cf
1302 */
1303FNIEMOP_DEF(iemOp_and_Al_Ib)
1304{
1305 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1306 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1307 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1308}
1309
1310
1311/**
1312 * @opcode 0x25
1313 * @opgroup og_gen_arith_bin
1314 * @opflmodify cf,pf,af,zf,sf,of
1315 * @opflundef af
1316 * @opflclear of,cf
1317 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX, imm16/32. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1324
1325
1326/**
1327 * @opcode 0x26
1328 * @opmnemonic SEG
1329 * @op1 ES
1330 * @opgroup og_prefix
1331 * @openc prefix
1332 * @opdisenum OP_SEG
1333 * @ophints harmless
1334 */
1335FNIEMOP_DEF(iemOp_seg_ES)
1336{
1337 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
1338 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
1339 pVCpu->iem.s.iEffSeg = X86_SREG_ES;
1340
1341 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1342 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1343}
1344
1345
1346/**
1347 * @opcode 0x27
1348 * @opfltest af,cf
1349 * @opflmodify cf,pf,af,zf,sf,of
1350 * @opflundef of
1351 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA - decimal adjust AL after addition; invalid in 64-bit mode, deferred to C. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_daa);
}
1360
1361
1362/**
1363 * @opcode 0x28
1364 * @opgroup og_gen_arith_bin
1365 * @opflmodify cf,pf,af,zf,sf,of
1366 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB r/m8, r8 - plain and LOCK-prefixed bodies. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sub_u8_locked);
}
1373
1374
1375/**
1376 * @opcode 0x29
1377 * @opgroup og_gen_arith_bin
1378 * @opflmodify cf,pf,af,zf,sf,of
1379 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB r/m16/32/64, r - plain and LOCK-prefixed bodies. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1386
1387
1388/**
1389 * @opcode 0x2a
1390 * @opgroup og_gen_arith_bin
1391 * @opflmodify cf,pf,af,zf,sf,of
1392 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB r8, r/m8. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1398
1399
1400/**
1401 * @opcode 0x2b
1402 * @opgroup og_gen_arith_bin
1403 * @opflmodify cf,pf,af,zf,sf,of
1404 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB r16/32/64, r/m. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1410
1411
1412/**
1413 * @opcode 0x2c
1414 * @opgroup og_gen_arith_bin
1415 * @opflmodify cf,pf,af,zf,sf,of
1416 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1422
1423
1424/**
1425 * @opcode 0x2d
1426 * @opgroup og_gen_arith_bin
1427 * @opflmodify cf,pf,af,zf,sf,of
1428 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX, imm16/32. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1434
1435
1436/**
1437 * @opcode 0x2e
1438 * @opmnemonic SEG
1439 * @op1 CS
1440 * @opgroup og_prefix
1441 * @openc prefix
1442 * @opdisenum OP_SEG
1443 * @ophints harmless
1444 */
1445FNIEMOP_DEF(iemOp_seg_CS)
1446{
1447 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
1448 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
1449 pVCpu->iem.s.iEffSeg = X86_SREG_CS;
1450
1451 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1452 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1453}
1454
1455
1456/**
1457 * @opcode 0x2f
1458 * @opfltest af,cf
1459 * @opflmodify cf,pf,af,zf,sf,of
1460 * @opflundef of
1461 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS - decimal adjust AL after subtraction; invalid in 64-bit mode, deferred to C. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_das);
}
1470
1471
1472/**
1473 * @opcode 0x30
1474 * @opgroup og_gen_arith_bin
1475 * @opflmodify cf,pf,af,zf,sf,of
1476 * @opflundef af
1477 * @opflclear of,cf
1478 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR r/m8, r8 - plain and LOCK-prefixed bodies; AF undefined after XOR. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_xor_u8_locked);
}
1486
1487
1488/**
1489 * @opcode 0x31
1490 * @opgroup og_gen_arith_bin
1491 * @opflmodify cf,pf,af,zf,sf,of
1492 * @opflundef af
1493 * @opflclear of,cf
1494 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR r/m16/32/64, r - plain and LOCK-prefixed bodies. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1502
1503
1504/**
1505 * @opcode 0x32
1506 * @opgroup og_gen_arith_bin
1507 * @opflmodify cf,pf,af,zf,sf,of
1508 * @opflundef af
1509 * @opflclear of,cf
1510 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR r8, r/m8. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1517
1518
1519/**
1520 * @opcode 0x33
1521 * @opgroup og_gen_arith_bin
1522 * @opflmodify cf,pf,af,zf,sf,of
1523 * @opflundef af
1524 * @opflclear of,cf
1525 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR r16/32/64, r/m. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
}
1532
1533
1534/**
1535 * @opcode 0x34
1536 * @opgroup og_gen_arith_bin
1537 * @opflmodify cf,pf,af,zf,sf,of
1538 * @opflundef af
1539 * @opflclear of,cf
1540 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1547
1548
1549/**
1550 * @opcode 0x35
1551 * @opgroup og_gen_arith_bin
1552 * @opflmodify cf,pf,af,zf,sf,of
1553 * @opflundef af
1554 * @opflclear of,cf
1555 */
1556FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1557{
1558 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1559 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1560 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1561}
1562
1563
1564/**
1565 * @opcode 0x36
1566 * @opmnemonic SEG
1567 * @op1 SS
1568 * @opgroup og_prefix
1569 * @openc prefix
1570 * @opdisenum OP_SEG
1571 * @ophints harmless
1572 */
1573FNIEMOP_DEF(iemOp_seg_SS)
1574{
1575 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
1576 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
1577 pVCpu->iem.s.iEffSeg = X86_SREG_SS;
1578
1579 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1580 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1581}
1582
1583
1584/**
1585 * @opcode 0x37
1586 * @opfltest af,cf
1587 * @opflmodify cf,pf,af,zf,sf,of
1588 * @opflundef pf,zf,sf,of
1589 * @opgroup og_gen_arith_dec
1590 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1591 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1592 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1593 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1594 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1595 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1596 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1597 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1598 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1599 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1600 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1601 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1602 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1603 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1604 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1605 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1606 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1607 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1608 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1609 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1610 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1611 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1612 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1613 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1614 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1615 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1616 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1617 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1618 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1619 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1620 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1621 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA - ASCII adjust AL after addition; invalid in 64-bit mode, deferred to C. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aaa);
}
1631
1632
1633/**
1634 * @opcode 0x38
1635 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP r/m8, r8 - read-only body (destination not written); the _NO_LOCK
       body handles rejection of an illegal LOCK prefix. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
1642
1643
1644/**
1645 * @opcode 0x39
1646 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP r/m16/32/64, r - read-only body. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
1652
1653
1654/**
1655 * @opcode 0x3a
1656 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP r8, r/m8. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1662
1663
1664/**
1665 * @opcode 0x3b
1666 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP r16/32/64, r/m - note the final 0 argument (no destination write-back),
       unlike the 1 passed by the or/adc/sbb/and/sub/xor Gv,Ev handlers. */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1672
1673
1674/**
1675 * @opcode 0x3c
1676 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL, imm8. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1682
1683
1684/**
1685 * @opcode 0x3d
1686 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX, imm16/32 - final 0 argument: no write-back. */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1692
1693
1694/**
1695 * @opcode 0x3e
1696 */
1697FNIEMOP_DEF(iemOp_seg_DS)
1698{
1699 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
1700 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
1701 pVCpu->iem.s.iEffSeg = X86_SREG_DS;
1702
1703 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1704 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1705}
1706
1707
1708/**
1709 * @opcode 0x3f
1710 * @opfltest af,cf
1711 * @opflmodify cf,pf,af,zf,sf,of
1712 * @opflundef pf,zf,sf,of
1713 * @opgroup og_gen_arith_dec
1714 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1715 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1716 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1717 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1718 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1719 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1720 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1721 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1722 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1723 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1724 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1725 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1726 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1727 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1728 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1729 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1730 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1731 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1732 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1733 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1734 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1735 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1736 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1737 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1738 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1739 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1740 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1741 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1742 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1743 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1744 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1745 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1746 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1747 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1748 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1749 */
1750FNIEMOP_DEF(iemOp_aas)
1751{
1752 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1753 IEMOP_HLP_NO_64BIT();
1754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1755 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1756
1757 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aas);
1758}
1759
1760
/**
 * Common 'inc/dec register' helper.
 *
 * Not for 64-bit code, only for what became the rex prefixes: emits the 16-bit
 * and 32-bit register forms (worker functions @a a_fnNormalU16 / @a a_fnNormalU32
 * on general register @a a_iReg, updating EFLAGS in place).  There is
 * deliberately no IEMMODE_64BIT case; the default case asserts unreachable.
 */
#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_64BIT); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *,  pu16Dst, 0); \
            IEM_MC_ARG(uint32_t *,  pEFlags, 1); \
            IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0); \
            IEM_MC_ARG(uint32_t *,  pEFlags, 1); \
            IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
1796
1797/**
1798 * @opcode 0x40
1799 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode (0x40 = REX with no W/R/X/B bits set).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1817
1818
1819/**
1820 * @opcode 0x41
1821 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode (0x41 = REX.B).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1840
1841
1842/**
1843 * @opcode 0x42
1844 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode (0x42 = REX.X).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1863
1864
1865
1866/**
1867 * @opcode 0x43
1868 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode (0x43 = REX.XB).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1888
1889
1890/**
1891 * @opcode 0x44
1892 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode (0x44 = REX.R).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1911
1912
1913/**
1914 * @opcode 0x45
1915 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode (0x45 = REX.RB).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1935
1936
1937/**
1938 * @opcode 0x46
1939 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode (0x46 = REX.RX).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
1959
1960
1961/**
1962 * @opcode 0x47
1963 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode (0x47 = REX.RXB).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
1984
1985
1986/**
1987 * @opcode 0x48
1988 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* 0x48 = REX.W: 64-bit operand size; must recalculate the effective
           operand size before restarting decode with the next opcode byte. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is the plain one-byte DEC instruction. */
    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
}
2007
2008
2009/**
2010 * @opcode 0x49
2011 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* 0x49 = REX.BW: ModRM.rm/SIB.base extension plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is the plain one-byte DEC instruction. */
    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
}
2031
2032
2033/**
2034 * @opcode 0x4a
2035 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* 0x4a = REX.XW: SIB.index extension plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is the plain one-byte DEC instruction. */
    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
}
2055
2056
2057/**
2058 * @opcode 0x4b
2059 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* 0x4b = REX.XBW: SIB.index and ModRM.rm/SIB.base extensions plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is the plain one-byte DEC instruction. */
    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
}
2080
2081
2082/**
2083 * @opcode 0x4c
2084 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* 0x4c = REX.RW: ModRM.reg extension plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is the plain one-byte DEC instruction. */
    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
}
2104
2105
2106/**
2107 * @opcode 0x4d
2108 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* 0x4d = REX.RBW: ModRM.reg and ModRM.rm/SIB.base extensions plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is the plain one-byte DEC instruction. */
    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
}
2129
2130
2131/**
2132 * @opcode 0x4e
2133 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* 0x4e = REX.RXW: ModRM.reg and SIB.index extensions plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is the plain one-byte DEC instruction. */
    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
}
2154
2155
2156/**
2157 * @opcode 0x4f
2158 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* 0x4f = REX.RXBW: all three register-field extensions plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is the plain one-byte DEC instruction. */
    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
}
2180
2181
2182/**
2183 * Common 'push register' helper.
2184 */
/**
 * Common worker for the 'push register' instructions (0x50..0x57).
 *
 * @param   iReg    The general register being pushed (X86_GREG_XXX); REX.B is
 *                  ORed in below for 64-bit mode.
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* In 64-bit mode PUSH defaults to a 64-bit operand; the 0x66 prefix
           selects 16-bit, and there is no 32-bit encoding. */
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2229
2230
2231/**
2232 * @opcode 0x50
2233 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    /* Operand size and REX.B extension (-> r8) are handled by the common worker. */
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
2239
2240
2241/**
2242 * @opcode 0x51
2243 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    /* Operand size and REX.B extension (-> r9) are handled by the common worker. */
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
2249
2250
2251/**
2252 * @opcode 0x52
2253 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    /* Operand size and REX.B extension (-> r10) are handled by the common worker. */
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
2259
2260
2261/**
2262 * @opcode 0x53
2263 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    /* Operand size and REX.B extension (-> r11) are handled by the common worker. */
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
2269
2270
2271/**
2272 * @opcode 0x54
2273 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    /* The 8086/8088 pushes SP *after* decrementing it, i.e. the stored value
       is SP-2; the 80286 and later push the pre-decrement value. Emulate the
       old behaviour only when the target CPU is an 8086. */
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        IEM_MC_BEGIN(0, 1, IEM_MC_F_ONLY_8086);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
2290
2291
2292/**
2293 * @opcode 0x55
2294 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    /* Operand size and REX.B extension (-> r13) are handled by the common worker. */
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
2300
2301
2302/**
2303 * @opcode 0x56
2304 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    /* Operand size and REX.B extension (-> r14) are handled by the common worker. */
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
2310
2311
2312/**
2313 * @opcode 0x57
2314 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    /* Operand size and REX.B extension (-> r15) are handled by the common worker. */
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
2320
2321
2322/**
2323 * Common 'pop register' helper.
2324 */
/**
 * Common worker for the 'pop register' instructions (0x58..0x5f).
 *
 * @param   iReg    The general register being popped (X86_GREG_XXX); REX.B is
 *                  ORed in below for 64-bit mode.
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* In 64-bit mode POP defaults to a 64-bit operand; the 0x66 prefix
           selects 16-bit, and there is no 32-bit encoding. */
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2370
2371
2372/**
2373 * @opcode 0x58
2374 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    /* Operand size and REX.B extension (-> r8) are handled by the common worker. */
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
2380
2381
2382/**
2383 * @opcode 0x59
2384 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    /* Operand size and REX.B extension (-> r9) are handled by the common worker. */
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
2390
2391
2392/**
2393 * @opcode 0x5a
2394 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    /* Operand size and REX.B extension (-> r10) are handled by the common worker. */
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
2400
2401
2402/**
2403 * @opcode 0x5b
2404 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    /* Operand size and REX.B extension (-> r11) are handled by the common worker. */
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
2410
2411
2412/**
2413 * @opcode 0x5c
2414 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* With REX.B this is 'pop r12', which has no SP special case, so the
           common worker can handle it (it ORs uRexB into the register index). */
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* POP SP is special: the value is read from the old stack top, then the
       loaded value (not the incremented one) becomes the new stack pointer,
       so it is decoded here with a local instead of a register reference. */
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2465
2466
2467/**
2468 * @opcode 0x5d
2469 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    /* Operand size and REX.B extension (-> r13) are handled by the common worker. */
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
2475
2476
2477/**
2478 * @opcode 0x5e
2479 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    /* Operand size and REX.B extension (-> r14) are handled by the common worker. */
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
2485
2486
2487/**
2488 * @opcode 0x5f
2489 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    /* Operand size and REX.B extension (-> r15) are handled by the common worker. */
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
2495
2496
2497/**
2498 * @opcode 0x60
2499 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();  /* PUSHA was introduced with the 80186. */
    IEMOP_HLP_NO_64BIT(); /* Opcode 0x60 is reassigned in 64-bit mode. */
    /* Only the 16-bit and 32-bit operand sizes exist; defer to C implementations. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_32);
}
2510
2511
2512/**
2513 * @opcode 0x61
2514 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    /* Outside 64-bit mode opcode 0x61 is POPA; in 64-bit mode it would be the
       MVEX prefix (unsupported here, see below). */
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();  /* POPA was introduced with the 80186. */
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_popa_32);
    }
    /* MVEX is not supported; raise #UD. */
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
2531
2532
2533/**
2534 * @opcode 0x62
2535 * @opmnemonic bound
2536 * @op1 Gv_RO
2537 * @op2 Ma
2538 * @opmincpu 80186
2539 * @ophints harmless x86_invalid_64
2540 * @optest op1=0 op2=0 ->
2541 * @optest op1=1 op2=0 -> value.xcpt=5
2542 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2543 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2544 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2545 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2546 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2547 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2548 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2549 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2550 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2551 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2552 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2553 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2554 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2555 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2556 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2557 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2558 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2559 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2560 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2561 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2562 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2563 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2564 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2565 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2566 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2567 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2568 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2569 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2570 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2571 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2572 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2573 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2574 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2575 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2576 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2577 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2578 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2579 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2580 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2581 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2582 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2583 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2584 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restrict wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186(); /* BOUND was introduced with the 80186. */
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /* MOD != 3: this really is BOUND; the bounds pair is read from memory
               and the index register is checked against it by a C implementation. */
            /** @todo testcase: check that there are two memory accesses involved. Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_64BIT);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        /* MOD == 3 in legacy mode: possibly an EVEX prefix; #UD if the guest
           CPU lacks AVX-512 support. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX decoding: consume the remaining two payload bytes, then bail since
       EVEX-encoded instructions are not implemented yet. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2672
2673
2674/** Opcode 0x63 - non-64-bit modes. */
/** Opcode 0x63 - non-64-bit modes. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();            /* ARPL requires protected mode, hence 286+. */
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: adjust the RPL of Ew in place; the assembly
           helper updates ZF via pEFlags. */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_64BIT);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map the word read/write, apply the helper,
           then commit the mapping and the flags. */
        IEM_MC_BEGIN(3, 3, IEM_MC_F_NOT_64BIT);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2722
2723
2724/**
2725 * @opcode 0x63
2726 *
2727 * @note This is a weird one. It works like a regular move instruction if
2728 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2729 * @todo This definitely needs a testcase to verify the odd cases. */
/**
 * @opcode 0x63
 *
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register: sign-extend the 32-bit source into the
             * 64-bit destination register.
             */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory.
             */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        /* MOVSXD without REX.W (plain 32/16-bit move per AMD docs, see @note)
           is not implemented yet. */
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
2771
2772
2773/**
2774 * @opcode 0x64
2775 * @opmnemonic segfs
2776 * @opmincpu 80386
2777 * @opgroup og_prefixes
2778 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386(); /* FS was introduced with the 80386. */

    /* Record the FS segment override and continue decoding the next byte. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2790
2791
2792/**
2793 * @opcode 0x65
2794 * @opmnemonic seggs
2795 * @opmincpu 80386
2796 * @opgroup og_prefixes
2797 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386(); /* GS was introduced with the 80386. */

    /* Record the GS segment override and continue decoding the next byte. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2809
2810
2811/**
2812 * @opcode 0x66
2813 * @opmnemonic opsize
2814 * @openc prefix
2815 * @opmincpu 80386
2816 * @ophints harmless
2817 * @opgroup og_prefixes
2818 */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386(); /* The operand-size prefix requires a 386 or later. */

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    /* Continue decoding with the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2835
2836
2837/**
2838 * @opcode 0x67
2839 * @opmnemonic addrsize
2840 * @openc prefix
2841 * @opmincpu 80386
2842 * @ophints harmless
2843 * @opgroup og_prefixes
2844 */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386(); /* The address-size prefix requires a 386 or later. */

    /* The prefix toggles between the default and the alternative address
       size; 64-bit code can only drop to 32-bit addressing. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    /* Continue decoding with the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2862
2863
2864/**
2865 * @opcode 0x68
2866 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186(); /* PUSH imm was introduced with the 80186. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEM_MC_BEGIN(0, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        case IEMMODE_64BIT:
        {
            /* The immediate is always at most 32 bits; sign-extend to 64. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2910
2911
2912/**
2913 * @opcode 0x69
2914 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186(); /* Three-operand IMUL was introduced with the 80186. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            /* Assembly helper picked per target-CPU EFLAGS behaviour. */
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 1, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                /* The product is computed in a local and stored to Gv afterwards. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Note: cbImm=2 as the immediate word follows the ModRM bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Note: cbImm=4 as the immediate dword follows the ModRM bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
                IEM_MC_ASSIGN_U32_SX_U64(u64Src, u32Imm); /* parameter count for the threaded function for this block. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3074
3075
3076/**
3077 * @opcode 0x6a
3078 */
3079FNIEMOP_DEF(iemOp_push_Ib)
3080{
3081 IEMOP_MNEMONIC(push_Ib, "push Ib");
3082 IEMOP_HLP_MIN_186();
3083 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3084 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3085
3086 switch (pVCpu->iem.s.enmEffOpSize)
3087 {
3088 case IEMMODE_16BIT:
3089 IEM_MC_BEGIN(0, 0, 0);
3090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3091 IEM_MC_PUSH_U16(i8Imm);
3092 IEM_MC_ADVANCE_RIP_AND_FINISH();
3093 IEM_MC_END();
3094 break;
3095 case IEMMODE_32BIT:
3096 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
3097 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3098 IEM_MC_PUSH_U32(i8Imm);
3099 IEM_MC_ADVANCE_RIP_AND_FINISH();
3100 IEM_MC_END();
3101 break;
3102 case IEMMODE_64BIT:
3103 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT);
3104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3105 IEM_MC_PUSH_U64(i8Imm);
3106 IEM_MC_ADVANCE_RIP_AND_FINISH();
3107 IEM_MC_END();
3108 break;
3109 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3110 }
3111}
3112
3113
3114/**
3115 * @opcode 0x6b
3116 */
3117FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
3118{
3119 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
3120 IEMOP_HLP_MIN_186();
3121 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3122 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3123
3124 switch (pVCpu->iem.s.enmEffOpSize)
3125 {
3126 case IEMMODE_16BIT:
3127 {
3128 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
3129 if (IEM_IS_MODRM_REG_MODE(bRm))
3130 {
3131 /* register operand */
3132 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3133 IEM_MC_BEGIN(3, 1, 0);
3134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3135 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3136 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
3137 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3138 IEM_MC_LOCAL(uint16_t, u16Tmp);
3139
3140 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3141 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
3142 IEM_MC_REF_EFLAGS(pEFlags);
3143 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3144 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3145
3146 IEM_MC_ADVANCE_RIP_AND_FINISH();
3147 IEM_MC_END();
3148 }
3149 else
3150 {
3151 /* memory operand */
3152 IEM_MC_BEGIN(3, 2, 0);
3153 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3154 IEM_MC_ARG(uint16_t, u16Src, 1);
3155 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3156 IEM_MC_LOCAL(uint16_t, u16Tmp);
3157 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3158
3159 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3160 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
3161 IEM_MC_ASSIGN(u16Src, u16Imm);
3162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3163 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3164 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
3165 IEM_MC_REF_EFLAGS(pEFlags);
3166 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3167 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3168
3169 IEM_MC_ADVANCE_RIP_AND_FINISH();
3170 IEM_MC_END();
3171 }
3172 break;
3173 }
3174
3175 case IEMMODE_32BIT:
3176 {
3177 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
3178 if (IEM_IS_MODRM_REG_MODE(bRm))
3179 {
3180 /* register operand */
3181 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3182 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386);
3183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3184 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3185 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
3186 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3187 IEM_MC_LOCAL(uint32_t, u32Tmp);
3188
3189 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3190 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
3191 IEM_MC_REF_EFLAGS(pEFlags);
3192 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3193 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3194
3195 IEM_MC_ADVANCE_RIP_AND_FINISH();
3196 IEM_MC_END();
3197 }
3198 else
3199 {
3200 /* memory operand */
3201 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386);
3202 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3203 IEM_MC_ARG(uint32_t, u32Src, 1);
3204 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3205 IEM_MC_LOCAL(uint32_t, u32Tmp);
3206 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3207
3208 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3209 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
3210 IEM_MC_ASSIGN(u32Src, u32Imm);
3211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3212 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3213 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
3214 IEM_MC_REF_EFLAGS(pEFlags);
3215 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3216 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3217
3218 IEM_MC_ADVANCE_RIP_AND_FINISH();
3219 IEM_MC_END();
3220 }
3221 break;
3222 }
3223
3224 case IEMMODE_64BIT:
3225 {
3226 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3227 if (IEM_IS_MODRM_REG_MODE(bRm))
3228 {
3229 /* register operand */
3230 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3231 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT);
3232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3233 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3234 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
3235 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3236 IEM_MC_LOCAL(uint64_t, u64Tmp);
3237
3238 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3239 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3240 IEM_MC_REF_EFLAGS(pEFlags);
3241 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3242 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3243
3244 IEM_MC_ADVANCE_RIP_AND_FINISH();
3245 IEM_MC_END();
3246 }
3247 else
3248 {
3249 /* memory operand */
3250 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT);
3251 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3252 IEM_MC_ARG(uint64_t, u64Src, 1);
3253 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3254 IEM_MC_LOCAL(uint64_t, u64Tmp);
3255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3256
3257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3258 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the */
3259 IEM_MC_ASSIGN_U8_SX_U64(u64Src, u8Imm); /* parameter count for the threaded function for this block. */
3260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3261 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3262 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3263 IEM_MC_REF_EFLAGS(pEFlags);
3264 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3265 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3266
3267 IEM_MC_ADVANCE_RIP_AND_FINISH();
3268 IEM_MC_END();
3269 }
3270 break;
3271 }
3272
3273 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3274 }
3275}
3276
3277
3278/**
3279 * @opcode 0x6c
3280 */
3281FNIEMOP_DEF(iemOp_insb_Yb_DX)
3282{
3283 IEMOP_HLP_MIN_186();
3284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3285 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3286 {
3287 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
3288 switch (pVCpu->iem.s.enmEffAddrMode)
3289 {
3290 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3291 iemCImpl_rep_ins_op8_addr16, false);
3292 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3293 iemCImpl_rep_ins_op8_addr32, false);
3294 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3295 iemCImpl_rep_ins_op8_addr64, false);
3296 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3297 }
3298 }
3299 else
3300 {
3301 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
3302 switch (pVCpu->iem.s.enmEffAddrMode)
3303 {
3304 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3305 iemCImpl_ins_op8_addr16, false);
3306 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3307 iemCImpl_ins_op8_addr32, false);
3308 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3309 iemCImpl_ins_op8_addr64, false);
3310 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3311 }
3312 }
3313}
3314
3315
3316/**
3317 * @opcode 0x6d
3318 */
3319FNIEMOP_DEF(iemOp_inswd_Yv_DX)
3320{
3321 IEMOP_HLP_MIN_186();
3322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3323 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3324 {
3325 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
3326 switch (pVCpu->iem.s.enmEffOpSize)
3327 {
3328 case IEMMODE_16BIT:
3329 switch (pVCpu->iem.s.enmEffAddrMode)
3330 {
3331 case IEMMODE_16BIT:
3332 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3333 iemCImpl_rep_ins_op16_addr16, false);
3334 case IEMMODE_32BIT:
3335 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3336 iemCImpl_rep_ins_op16_addr32, false);
3337 case IEMMODE_64BIT:
3338 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3339 iemCImpl_rep_ins_op16_addr64, false);
3340 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3341 }
3342 break;
3343 case IEMMODE_64BIT:
3344 case IEMMODE_32BIT:
3345 switch (pVCpu->iem.s.enmEffAddrMode)
3346 {
3347 case IEMMODE_16BIT:
3348 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3349 iemCImpl_rep_ins_op32_addr16, false);
3350 case IEMMODE_32BIT:
3351 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3352 iemCImpl_rep_ins_op32_addr32, false);
3353 case IEMMODE_64BIT:
3354 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3355 iemCImpl_rep_ins_op32_addr64, false);
3356 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3357 }
3358 break;
3359 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3360 }
3361 }
3362 else
3363 {
3364 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
3365 switch (pVCpu->iem.s.enmEffOpSize)
3366 {
3367 case IEMMODE_16BIT:
3368 switch (pVCpu->iem.s.enmEffAddrMode)
3369 {
3370 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3371 iemCImpl_ins_op16_addr16, false);
3372 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3373 iemCImpl_ins_op16_addr32, false);
3374 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3375 iemCImpl_ins_op16_addr64, false);
3376 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3377 }
3378 break;
3379 case IEMMODE_64BIT:
3380 case IEMMODE_32BIT:
3381 switch (pVCpu->iem.s.enmEffAddrMode)
3382 {
3383 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3384 iemCImpl_ins_op32_addr16, false);
3385 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3386 iemCImpl_ins_op32_addr32, false);
3387 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3388 iemCImpl_ins_op32_addr64, false);
3389 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3390 }
3391 break;
3392 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3393 }
3394 }
3395}
3396
3397
3398/**
3399 * @opcode 0x6e
3400 */
3401FNIEMOP_DEF(iemOp_outsb_Yb_DX)
3402{
3403 IEMOP_HLP_MIN_186();
3404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3405 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3406 {
3407 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
3408 switch (pVCpu->iem.s.enmEffAddrMode)
3409 {
3410 case IEMMODE_16BIT:
3411 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3412 iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3413 case IEMMODE_32BIT:
3414 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3415 iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3416 case IEMMODE_64BIT:
3417 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3418 iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3419 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3420 }
3421 }
3422 else
3423 {
3424 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
3425 switch (pVCpu->iem.s.enmEffAddrMode)
3426 {
3427 case IEMMODE_16BIT:
3428 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3429 iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3430 case IEMMODE_32BIT:
3431 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3432 iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3433 case IEMMODE_64BIT:
3434 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3435 iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3436 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3437 }
3438 }
3439}
3440
3441
3442/**
3443 * @opcode 0x6f
3444 */
3445FNIEMOP_DEF(iemOp_outswd_Yv_DX)
3446{
3447 IEMOP_HLP_MIN_186();
3448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3449 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3450 {
3451 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
3452 switch (pVCpu->iem.s.enmEffOpSize)
3453 {
3454 case IEMMODE_16BIT:
3455 switch (pVCpu->iem.s.enmEffAddrMode)
3456 {
3457 case IEMMODE_16BIT:
3458 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3459 iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3460 case IEMMODE_32BIT:
3461 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3462 iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3463 case IEMMODE_64BIT:
3464 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3465 iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3466 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3467 }
3468 break;
3469 case IEMMODE_64BIT:
3470 case IEMMODE_32BIT:
3471 switch (pVCpu->iem.s.enmEffAddrMode)
3472 {
3473 case IEMMODE_16BIT:
3474 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3475 iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3476 case IEMMODE_32BIT:
3477 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3478 iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3479 case IEMMODE_64BIT:
3480 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3481 iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3482 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3483 }
3484 break;
3485 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3486 }
3487 }
3488 else
3489 {
3490 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
3491 switch (pVCpu->iem.s.enmEffOpSize)
3492 {
3493 case IEMMODE_16BIT:
3494 switch (pVCpu->iem.s.enmEffAddrMode)
3495 {
3496 case IEMMODE_16BIT:
3497 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3498 iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3499 case IEMMODE_32BIT:
3500 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3501 iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3502 case IEMMODE_64BIT:
3503 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3504 iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3505 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3506 }
3507 break;
3508 case IEMMODE_64BIT:
3509 case IEMMODE_32BIT:
3510 switch (pVCpu->iem.s.enmEffAddrMode)
3511 {
3512 case IEMMODE_16BIT:
3513 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3514 iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3515 case IEMMODE_32BIT:
3516 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3517 iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3518 case IEMMODE_64BIT:
3519 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3520 iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3521 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3522 }
3523 break;
3524 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3525 }
3526 }
3527}
3528
3529
3530/**
3531 * @opcode 0x70
3532 */
3533FNIEMOP_DEF(iemOp_jo_Jb)
3534{
3535 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
3536 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3537 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3538
3539 IEM_MC_BEGIN(0, 0, 0);
3540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3541 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3542 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3543 } IEM_MC_ELSE() {
3544 IEM_MC_ADVANCE_RIP_AND_FINISH();
3545 } IEM_MC_ENDIF();
3546 IEM_MC_END();
3547}
3548
3549
3550/**
3551 * @opcode 0x71
3552 */
3553FNIEMOP_DEF(iemOp_jno_Jb)
3554{
3555 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
3556 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3557 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3558
3559 IEM_MC_BEGIN(0, 0, 0);
3560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3561 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3562 IEM_MC_ADVANCE_RIP_AND_FINISH();
3563 } IEM_MC_ELSE() {
3564 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3565 } IEM_MC_ENDIF();
3566 IEM_MC_END();
3567}
3568
3569/**
3570 * @opcode 0x72
3571 */
3572FNIEMOP_DEF(iemOp_jc_Jb)
3573{
3574 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
3575 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3576 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3577
3578 IEM_MC_BEGIN(0, 0, 0);
3579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3580 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3581 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3582 } IEM_MC_ELSE() {
3583 IEM_MC_ADVANCE_RIP_AND_FINISH();
3584 } IEM_MC_ENDIF();
3585 IEM_MC_END();
3586}
3587
3588
3589/**
3590 * @opcode 0x73
3591 */
3592FNIEMOP_DEF(iemOp_jnc_Jb)
3593{
3594 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
3595 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3596 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3597
3598 IEM_MC_BEGIN(0, 0, 0);
3599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3600 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3601 IEM_MC_ADVANCE_RIP_AND_FINISH();
3602 } IEM_MC_ELSE() {
3603 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3604 } IEM_MC_ENDIF();
3605 IEM_MC_END();
3606}
3607
3608
3609/**
3610 * @opcode 0x74
3611 */
3612FNIEMOP_DEF(iemOp_je_Jb)
3613{
3614 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
3615 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3616 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3617
3618 IEM_MC_BEGIN(0, 0, 0);
3619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3620 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3621 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3622 } IEM_MC_ELSE() {
3623 IEM_MC_ADVANCE_RIP_AND_FINISH();
3624 } IEM_MC_ENDIF();
3625 IEM_MC_END();
3626}
3627
3628
3629/**
3630 * @opcode 0x75
3631 */
3632FNIEMOP_DEF(iemOp_jne_Jb)
3633{
3634 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
3635 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3636 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3637
3638 IEM_MC_BEGIN(0, 0, 0);
3639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3640 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3641 IEM_MC_ADVANCE_RIP_AND_FINISH();
3642 } IEM_MC_ELSE() {
3643 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3644 } IEM_MC_ENDIF();
3645 IEM_MC_END();
3646}
3647
3648
3649/**
3650 * @opcode 0x76
3651 */
3652FNIEMOP_DEF(iemOp_jbe_Jb)
3653{
3654 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
3655 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3656 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3657
3658 IEM_MC_BEGIN(0, 0, 0);
3659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3660 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3661 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3662 } IEM_MC_ELSE() {
3663 IEM_MC_ADVANCE_RIP_AND_FINISH();
3664 } IEM_MC_ENDIF();
3665 IEM_MC_END();
3666}
3667
3668
3669/**
3670 * @opcode 0x77
3671 */
3672FNIEMOP_DEF(iemOp_jnbe_Jb)
3673{
3674 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
3675 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3676 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3677
3678 IEM_MC_BEGIN(0, 0, 0);
3679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3680 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3681 IEM_MC_ADVANCE_RIP_AND_FINISH();
3682 } IEM_MC_ELSE() {
3683 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3684 } IEM_MC_ENDIF();
3685 IEM_MC_END();
3686}
3687
3688
3689/**
3690 * @opcode 0x78
3691 */
3692FNIEMOP_DEF(iemOp_js_Jb)
3693{
3694 IEMOP_MNEMONIC(js_Jb, "js Jb");
3695 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3696 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3697
3698 IEM_MC_BEGIN(0, 0, 0);
3699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3700 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3701 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3702 } IEM_MC_ELSE() {
3703 IEM_MC_ADVANCE_RIP_AND_FINISH();
3704 } IEM_MC_ENDIF();
3705 IEM_MC_END();
3706}
3707
3708
3709/**
3710 * @opcode 0x79
3711 */
3712FNIEMOP_DEF(iemOp_jns_Jb)
3713{
3714 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
3715 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3716 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3717
3718 IEM_MC_BEGIN(0, 0, 0);
3719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3720 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3721 IEM_MC_ADVANCE_RIP_AND_FINISH();
3722 } IEM_MC_ELSE() {
3723 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3724 } IEM_MC_ENDIF();
3725 IEM_MC_END();
3726}
3727
3728
3729/**
3730 * @opcode 0x7a
3731 */
3732FNIEMOP_DEF(iemOp_jp_Jb)
3733{
3734 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
3735 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3736 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3737
3738 IEM_MC_BEGIN(0, 0, 0);
3739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3740 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3741 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3742 } IEM_MC_ELSE() {
3743 IEM_MC_ADVANCE_RIP_AND_FINISH();
3744 } IEM_MC_ENDIF();
3745 IEM_MC_END();
3746}
3747
3748
3749/**
3750 * @opcode 0x7b
3751 */
3752FNIEMOP_DEF(iemOp_jnp_Jb)
3753{
3754 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
3755 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3756 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3757
3758 IEM_MC_BEGIN(0, 0, 0);
3759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3760 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3761 IEM_MC_ADVANCE_RIP_AND_FINISH();
3762 } IEM_MC_ELSE() {
3763 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3764 } IEM_MC_ENDIF();
3765 IEM_MC_END();
3766}
3767
3768
3769/**
3770 * @opcode 0x7c
3771 */
3772FNIEMOP_DEF(iemOp_jl_Jb)
3773{
3774 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
3775 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3776 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3777
3778 IEM_MC_BEGIN(0, 0, 0);
3779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3780 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3781 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3782 } IEM_MC_ELSE() {
3783 IEM_MC_ADVANCE_RIP_AND_FINISH();
3784 } IEM_MC_ENDIF();
3785 IEM_MC_END();
3786}
3787
3788
3789/**
3790 * @opcode 0x7d
3791 */
3792FNIEMOP_DEF(iemOp_jnl_Jb)
3793{
3794 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
3795 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3796 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3797
3798 IEM_MC_BEGIN(0, 0, 0);
3799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3800 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3801 IEM_MC_ADVANCE_RIP_AND_FINISH();
3802 } IEM_MC_ELSE() {
3803 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3804 } IEM_MC_ENDIF();
3805 IEM_MC_END();
3806}
3807
3808
3809/**
3810 * @opcode 0x7e
3811 */
3812FNIEMOP_DEF(iemOp_jle_Jb)
3813{
3814 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
3815 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3816 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3817
3818 IEM_MC_BEGIN(0, 0, 0);
3819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3820 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3821 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3822 } IEM_MC_ELSE() {
3823 IEM_MC_ADVANCE_RIP_AND_FINISH();
3824 } IEM_MC_ENDIF();
3825 IEM_MC_END();
3826}
3827
3828
3829/**
3830 * @opcode 0x7f
3831 */
3832FNIEMOP_DEF(iemOp_jnle_Jb)
3833{
3834 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
3835 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3836 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3837
3838 IEM_MC_BEGIN(0, 0, 0);
3839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3840 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3841 IEM_MC_ADVANCE_RIP_AND_FINISH();
3842 } IEM_MC_ELSE() {
3843 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3844 } IEM_MC_ENDIF();
3845 IEM_MC_END();
3846}
3847
3848
3849/**
3850 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
3851 * iemOp_Grp1_Eb_Ib_80.
3852 */
3853#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
3854 if (IEM_IS_MODRM_REG_MODE(bRm)) \
3855 { \
3856 /* register target */ \
3857 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3858 IEM_MC_BEGIN(3, 0, 0); \
3859 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
3860 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3861 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3862 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
3863 \
3864 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3865 IEM_MC_REF_EFLAGS(pEFlags); \
3866 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3867 \
3868 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3869 IEM_MC_END(); \
3870 } \
3871 else \
3872 { \
3873 /* memory target */ \
3874 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
3875 { \
3876 IEM_MC_BEGIN(3, 3, 0); \
3877 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3878 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3879 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3880 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
3881 \
3882 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
3883 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3884 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3885 IEMOP_HLP_DONE_DECODING(); \
3886 \
3887 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
3888 IEM_MC_FETCH_EFLAGS(EFlags); \
3889 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3890 \
3891 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
3892 IEM_MC_COMMIT_EFLAGS(EFlags); \
3893 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3894 IEM_MC_END(); \
3895 } \
3896 else \
3897 { \
3898 (void)0
3899
/**
 * Tail for IEMOP_BODY_BINARY_Eb_Ib_RW: the LOCK-prefixed memory target.
 * Supplies the body of - and closes - the dangling 'else {' opened by the
 * _RW macro, calling the atomic (locked) worker instead of the normal one.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = the trailing imm8 byte */ \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
3923
/**
 * Read-only variant of IEMOP_BODY_BINARY_Eb_Ib_RW, used for CMP Eb,Ib which
 * only updates EFLAGS.  The memory operand is mapped read-only (const
 * pointer, _RO map/unmap).  Like _RW it ends with an open 'else {' for the
 * LOCK case, to be closed by IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK (LOCK is
 * invalid on a read-only destination).
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = the trailing imm8 byte */ \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
3970
/**
 * Tail for IEMOP_BODY_BINARY_Eb_Ib_RO: raises \#UD for a LOCK prefix and
 * closes the dangling 'else {' opened by the _RO macro.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
3977
3978
3979
3980/**
3981 * @opmaps grp1_80,grp1_83
3982 * @opcode /0
3983 */
3984FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
3985{
3986 IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
3987 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_add_u8);
3988 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
3989}
3990
3991
3992/**
3993 * @opmaps grp1_80,grp1_83
3994 * @opcode /1
3995 */
3996FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
3997{
3998 IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
3999 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_or_u8);
4000 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
4001}
4002
4003
4004/**
4005 * @opmaps grp1_80,grp1_83
4006 * @opcode /2
4007 */
4008FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
4009{
4010 IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
4011 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_adc_u8);
4012 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
4013}
4014
4015
4016/**
4017 * @opmaps grp1_80,grp1_83
4018 * @opcode /3
4019 */
4020FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
4021{
4022 IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
4023 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sbb_u8);
4024 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
4025}
4026
4027
4028/**
4029 * @opmaps grp1_80,grp1_83
4030 * @opcode /4
4031 */
4032FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
4033{
4034 IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
4035 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_and_u8);
4036 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
4037}
4038
4039
4040/**
4041 * @opmaps grp1_80,grp1_83
4042 * @opcode /5
4043 */
4044FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
4045{
4046 IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
4047 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sub_u8);
4048 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
4049}
4050
4051
4052/**
4053 * @opmaps grp1_80,grp1_83
4054 * @opcode /6
4055 */
4056FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
4057{
4058 IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
4059 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_xor_u8);
4060 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
4061}
4062
4063
4064/**
4065 * @opmaps grp1_80,grp1_83
4066 * @opcode /7
4067 */
4068FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
4069{
4070 IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
4071 IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
4072 IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
4073}
4074
4075
4076/**
4077 * @opcode 0x80
4078 */
4079FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
4080{
4081 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4082 switch (IEM_GET_MODRM_REG_8(bRm))
4083 {
4084 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
4085 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib, bRm);
4086 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
4087 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
4088 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
4089 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
4090 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
4091 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
4092 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4093 }
4094}
4095
4096
4097/**
4098 * Body for a group 1 binary operator.
4099 */
4100#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4101 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4102 { \
4103 /* register target */ \
4104 switch (pVCpu->iem.s.enmEffOpSize) \
4105 { \
4106 case IEMMODE_16BIT: \
4107 { \
4108 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4109 IEM_MC_BEGIN(3, 0, 0); \
4110 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4111 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4112 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4113 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4114 \
4115 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4116 IEM_MC_REF_EFLAGS(pEFlags); \
4117 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4118 \
4119 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4120 IEM_MC_END(); \
4121 break; \
4122 } \
4123 \
4124 case IEMMODE_32BIT: \
4125 { \
4126 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4127 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
4128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4129 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4130 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4131 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4132 \
4133 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4134 IEM_MC_REF_EFLAGS(pEFlags); \
4135 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4136 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
4137 \
4138 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4139 IEM_MC_END(); \
4140 break; \
4141 } \
4142 \
4143 case IEMMODE_64BIT: \
4144 { \
4145 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4146 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT); \
4147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4148 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4149 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4150 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4151 \
4152 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4153 IEM_MC_REF_EFLAGS(pEFlags); \
4154 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4155 \
4156 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4157 IEM_MC_END(); \
4158 break; \
4159 } \
4160 \
4161 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4162 } \
4163 } \
4164 else \
4165 { \
4166 /* memory target */ \
4167 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4168 { \
4169 switch (pVCpu->iem.s.enmEffOpSize) \
4170 { \
4171 case IEMMODE_16BIT: \
4172 { \
4173 IEM_MC_BEGIN(3, 3, 0); \
4174 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4175 IEM_MC_ARG(uint16_t, u16Src, 1); \
4176 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4177 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4178 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4179 \
4180 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4181 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4182 IEM_MC_ASSIGN(u16Src, u16Imm); \
4183 IEMOP_HLP_DONE_DECODING(); \
4184 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4185 IEM_MC_FETCH_EFLAGS(EFlags); \
4186 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4187 \
4188 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
4189 IEM_MC_COMMIT_EFLAGS(EFlags); \
4190 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4191 IEM_MC_END(); \
4192 break; \
4193 } \
4194 \
4195 case IEMMODE_32BIT: \
4196 { \
4197 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
4198 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4199 IEM_MC_ARG(uint32_t, u32Src, 1); \
4200 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4202 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4203 \
4204 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4205 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4206 IEM_MC_ASSIGN(u32Src, u32Imm); \
4207 IEMOP_HLP_DONE_DECODING(); \
4208 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4209 IEM_MC_FETCH_EFLAGS(EFlags); \
4210 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4211 \
4212 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
4213 IEM_MC_COMMIT_EFLAGS(EFlags); \
4214 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4215 IEM_MC_END(); \
4216 break; \
4217 } \
4218 \
4219 case IEMMODE_64BIT: \
4220 { \
4221 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT); \
4222 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4223 IEM_MC_ARG(uint64_t, u64Src, 1); \
4224 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4225 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4226 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4227 \
4228 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4229 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4230 IEMOP_HLP_DONE_DECODING(); \
4231 IEM_MC_ASSIGN(u64Src, u64Imm); \
4232 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4233 IEM_MC_FETCH_EFLAGS(EFlags); \
4234 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4235 \
4236 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
4237 IEM_MC_COMMIT_EFLAGS(EFlags); \
4238 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4239 IEM_MC_END(); \
4240 break; \
4241 } \
4242 \
4243 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4244 } \
4245 } \
4246 else \
4247 { \
4248 (void)0
/* This must be a separate macro due to parsing restrictions in IEMAllInstPython.py. */
/**
 * Locked-memory tail for IEMOP_BODY_BINARY_Ev_Iz_RW: completes the open
 * 'else' branch (LOCK prefix present, memory target) using the atomic
 * (_locked) worker functions, then closes both outer scopes.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0); \
                    IEM_MC_ARG(uint16_t *,  pu16Dst,           0); \
                    IEM_MC_ARG(uint16_t,    u16Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
                    IEM_MC_ARG(uint32_t *,  pu32Dst,           0); \
                    IEM_MC_ARG(uint32_t,    u32Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT); \
                    IEM_MC_ARG(uint64_t *,  pu64Dst,           0); \
                    IEM_MC_ARG(uint64_t,    u64Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4330
/* read-only version */
/**
 * Body for a group 1 binary operator (opcode 0x81 /7, i.e. CMP Ev,Iz) that
 * only reads the destination: the memory operand is mapped read-only and a
 * LOCK prefix raises the invalid-lock-prefix exception.  Unlike the _RW
 * variant this macro is self-contained (no _LOCKED tail needed).
 */
#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst,       0); \
                    IEM_MC_ARG(uint16_t,    u16Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst,       0); \
                    IEM_MC_ARG(uint32_t,    u32Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst,       0); \
                    IEM_MC_ARG(uint64_t,    u64Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4484
4485
4486/**
4487 * @opmaps grp1_81
4488 * @opcode /0
4489 */
4490FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
4491{
4492 IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
4493 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
4494 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
4495}
4496
4497
4498/**
4499 * @opmaps grp1_81
4500 * @opcode /1
4501 */
4502FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
4503{
4504 IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
4505 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
4506 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
4507}
4508
4509
4510/**
4511 * @opmaps grp1_81
4512 * @opcode /2
4513 */
4514FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
4515{
4516 IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
4517 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
4518 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
4519}
4520
4521
4522/**
4523 * @opmaps grp1_81
4524 * @opcode /3
4525 */
4526FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
4527{
4528 IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
4529 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
4530 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
4531}
4532
4533
4534/**
4535 * @opmaps grp1_81
4536 * @opcode /4
4537 */
4538FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
4539{
4540 IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
4541 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
4542 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
4543}
4544
4545
4546/**
4547 * @opmaps grp1_81
4548 * @opcode /5
4549 */
4550FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
4551{
4552 IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
4553 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
4554 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
4555}
4556
4557
4558/**
4559 * @opmaps grp1_81
4560 * @opcode /6
4561 */
4562FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
4563{
4564 IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
4565 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
4566 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
4567}
4568
4569
4570/**
4571 * @opmaps grp1_81
4572 * @opcode /7
4573 */
4574FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
4575{
4576 IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
4577 IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
4578}
4579
4580
4581/**
4582 * @opcode 0x81
4583 */
4584FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
4585{
4586 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4587 switch (IEM_GET_MODRM_REG_8(bRm))
4588 {
4589 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
4590 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz, bRm);
4591 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
4592 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
4593 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
4594 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
4595 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
4596 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
4597 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4598 }
4599}
4600
4601
4602/**
4603 * @opcode 0x82
4604 * @opmnemonic grp1_82
4605 * @opgroup og_groups
4606 */
4607FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
4608{
4609 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
4610 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
4611}
4612
4613
4614/**
4615 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4616 * iemOp_Grp1_Ev_Ib.
4617 */
4618#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4619 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4620 { \
4621 /* \
4622 * Register target \
4623 */ \
4624 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4625 switch (pVCpu->iem.s.enmEffOpSize) \
4626 { \
4627 case IEMMODE_16BIT: \
4628 IEM_MC_BEGIN(3, 0, 0); \
4629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4630 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4631 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
4632 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4633 \
4634 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4635 IEM_MC_REF_EFLAGS(pEFlags); \
4636 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4637 \
4638 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4639 IEM_MC_END(); \
4640 break; \
4641 \
4642 case IEMMODE_32BIT: \
4643 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
4644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4645 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4646 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
4647 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4648 \
4649 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4650 IEM_MC_REF_EFLAGS(pEFlags); \
4651 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4652 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
4653 \
4654 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4655 IEM_MC_END(); \
4656 break; \
4657 \
4658 case IEMMODE_64BIT: \
4659 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT); \
4660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4661 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4662 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
4663 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4664 \
4665 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4666 IEM_MC_REF_EFLAGS(pEFlags); \
4667 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4668 \
4669 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4670 IEM_MC_END(); \
4671 break; \
4672 \
4673 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4674 } \
4675 } \
4676 else \
4677 { \
4678 /* \
4679 * Memory target. \
4680 */ \
4681 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4682 { \
4683 switch (pVCpu->iem.s.enmEffOpSize) \
4684 { \
4685 case IEMMODE_16BIT: \
4686 IEM_MC_BEGIN(3, 3, 0); \
4687 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4688 IEM_MC_ARG(uint16_t, u16Src, 1); \
4689 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4690 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4691 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4692 \
4693 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4694 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4695 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
4696 IEMOP_HLP_DONE_DECODING(); \
4697 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4698 IEM_MC_FETCH_EFLAGS(EFlags); \
4699 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4700 \
4701 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
4702 IEM_MC_COMMIT_EFLAGS(EFlags); \
4703 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4704 IEM_MC_END(); \
4705 break; \
4706 \
4707 case IEMMODE_32BIT: \
4708 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
4709 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4710 IEM_MC_ARG(uint32_t, u32Src, 1); \
4711 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4712 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4713 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4714 \
4715 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4716 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4717 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
4718 IEMOP_HLP_DONE_DECODING(); \
4719 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4720 IEM_MC_FETCH_EFLAGS(EFlags); \
4721 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4722 \
4723 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
4724 IEM_MC_COMMIT_EFLAGS(EFlags); \
4725 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4726 IEM_MC_END(); \
4727 break; \
4728 \
4729 case IEMMODE_64BIT: \
4730 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT); \
4731 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4732 IEM_MC_ARG(uint64_t, u64Src, 1); \
4733 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4734 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4735 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4736 \
4737 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4738 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4739 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
4740 IEMOP_HLP_DONE_DECODING(); \
4741 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4742 IEM_MC_FETCH_EFLAGS(EFlags); \
4743 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4744 \
4745 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
4746 IEM_MC_COMMIT_EFLAGS(EFlags); \
4747 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4748 IEM_MC_END(); \
4749 break; \
4750 \
4751 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4752 } \
4753 } \
4754 else \
4755 { \
4756 (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * Locked-memory tail for IEMOP_BODY_BINARY_Ev_Ib_RW: completes the open
 * 'else' branch (LOCK prefix present, memory target) using the atomic
 * (_locked) worker functions, then closes both outer scopes.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0); \
                    IEM_MC_ARG(uint16_t *,  pu16Dst,           0); \
                    IEM_MC_ARG(uint16_t,    u16Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
                    IEM_MC_ARG(uint32_t *,  pu32Dst,           0); \
                    IEM_MC_ARG(uint32_t,    u32Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT); \
                    IEM_MC_ARG(uint64_t *,  pu64Dst,           0); \
                    IEM_MC_ARG(uint64_t,    u64Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4832
/* read-only variant */
/**
 * Body for group 1 w/ byte imm operand (opcode 0x83 /7, i.e. CMP Ev,Ib)
 * that only reads the destination: the memory operand is mapped read-only
 * and a LOCK prefix raises the invalid-lock-prefix exception.  Unlike the
 * _RW variant this macro is self-contained (no _LOCKED tail needed).
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst,       0); \
                    IEM_MC_ARG(uint16_t,    u16Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst,       0); \
                    IEM_MC_ARG(uint32_t,    u32Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst,       0); \
                    IEM_MC_ARG(uint64_t,    u64Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4976
4977/**
4978 * @opmaps grp1_83
4979 * @opcode /0
4980 */
4981FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
4982{
4983 IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
4984 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
4985 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
4986}
4987
4988
4989/**
4990 * @opmaps grp1_83
4991 * @opcode /1
4992 */
4993FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
4994{
4995 IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
4996 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
4997 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
4998}
4999
5000
5001/**
5002 * @opmaps grp1_83
5003 * @opcode /2
5004 */
5005FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
5006{
5007 IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
5008 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
5009 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
5010}
5011
5012
5013/**
5014 * @opmaps grp1_83
5015 * @opcode /3
5016 */
5017FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
5018{
5019 IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
5020 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
5021 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
5022}
5023
5024
5025/**
5026 * @opmaps grp1_83
5027 * @opcode /4
5028 */
5029FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
5030{
5031 IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
5032 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
5033 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
5034}
5035
5036
5037/**
5038 * @opmaps grp1_83
5039 * @opcode /5
5040 */
5041FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
5042{
5043 IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
5044 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
5045 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
5046}
5047
5048
5049/**
5050 * @opmaps grp1_83
5051 * @opcode /6
5052 */
5053FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
5054{
5055 IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
5056 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
5057 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
5058}
5059
5060
5061/**
5062 * @opmaps grp1_83
5063 * @opcode /7
5064 */
5065FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
5066{
5067 IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
5068 IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
5069}
5070
5071
5072/**
5073 * @opcode 0x83
5074 */
5075FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
5076{
5077 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
5078 to the 386 even if absent in the intel reference manuals and some
5079 3rd party opcode listings. */
5080 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5081 switch (IEM_GET_MODRM_REG_8(bRm))
5082 {
5083 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
5084 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib, bRm);
5085 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
5086 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
5087 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
5088 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
5089 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
5090 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
5091 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5092 }
5093}
5094
5095
5096/**
5097 * @opcode 0x84
5098 */
5099FNIEMOP_DEF(iemOp_test_Eb_Gb)
5100{
5101 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
5102 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5103 IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8);
5104 IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
5105}
5106
5107
5108/**
5109 * @opcode 0x85
5110 */
5111FNIEMOP_DEF(iemOp_test_Ev_Gv)
5112{
5113 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
5114 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5115 IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64);
5116}
5117
5118
5119/**
5120 * @opcode 0x86
5121 */
5122FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
5123{
5124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5125 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
5126
5127 /*
5128 * If rm is denoting a register, no more instruction bytes.
5129 */
5130 if (IEM_IS_MODRM_REG_MODE(bRm))
5131 {
5132 IEM_MC_BEGIN(0, 2, 0);
5133 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5134 IEM_MC_LOCAL(uint8_t, uTmp1);
5135 IEM_MC_LOCAL(uint8_t, uTmp2);
5136
5137 IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5138 IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5139 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5140 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5141
5142 IEM_MC_ADVANCE_RIP_AND_FINISH();
5143 IEM_MC_END();
5144 }
5145 else
5146 {
5147 /*
5148 * We're accessing memory.
5149 */
5150 IEM_MC_BEGIN(2, 4, 0);
5151 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5152 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
5153 IEM_MC_LOCAL(uint8_t, uTmpReg);
5154 IEM_MC_ARG(uint8_t *, pu8Mem, 0);
5155 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1);
5156
5157 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5159 IEM_MC_MEM_MAP_U8_RW(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5160 IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
5161 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5162 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
5163 else
5164 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
5165 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Mem, bUnmapInfo);
5166 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);
5167
5168 IEM_MC_ADVANCE_RIP_AND_FINISH();
5169 IEM_MC_END();
5170 }
5171}
5172
5173
5174/**
5175 * @opcode 0x87
5176 */
5177FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
5178{
5179 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
5180 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5181
5182 /*
5183 * If rm is denoting a register, no more instruction bytes.
5184 */
5185 if (IEM_IS_MODRM_REG_MODE(bRm))
5186 {
5187 switch (pVCpu->iem.s.enmEffOpSize)
5188 {
5189 case IEMMODE_16BIT:
5190 IEM_MC_BEGIN(0, 2, 0);
5191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5192 IEM_MC_LOCAL(uint16_t, uTmp1);
5193 IEM_MC_LOCAL(uint16_t, uTmp2);
5194
5195 IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5196 IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5197 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5198 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5199
5200 IEM_MC_ADVANCE_RIP_AND_FINISH();
5201 IEM_MC_END();
5202 break;
5203
5204 case IEMMODE_32BIT:
5205 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386);
5206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5207 IEM_MC_LOCAL(uint32_t, uTmp1);
5208 IEM_MC_LOCAL(uint32_t, uTmp2);
5209
5210 IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5211 IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5212 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5213 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5214
5215 IEM_MC_ADVANCE_RIP_AND_FINISH();
5216 IEM_MC_END();
5217 break;
5218
5219 case IEMMODE_64BIT:
5220 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
5221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5222 IEM_MC_LOCAL(uint64_t, uTmp1);
5223 IEM_MC_LOCAL(uint64_t, uTmp2);
5224
5225 IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5226 IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5227 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5228 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5229
5230 IEM_MC_ADVANCE_RIP_AND_FINISH();
5231 IEM_MC_END();
5232 break;
5233
5234 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5235 }
5236 }
5237 else
5238 {
5239 /*
5240 * We're accessing memory.
5241 */
5242 switch (pVCpu->iem.s.enmEffOpSize)
5243 {
5244 case IEMMODE_16BIT:
5245 IEM_MC_BEGIN(2, 4, 0);
5246 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5247 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
5248 IEM_MC_LOCAL(uint16_t, uTmpReg);
5249 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
5250 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1);
5251
5252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5253 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5254 IEM_MC_MEM_MAP_U16_RW(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5255 IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
5256 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5257 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
5258 else
5259 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
5260 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Mem, bUnmapInfo);
5261 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);
5262
5263 IEM_MC_ADVANCE_RIP_AND_FINISH();
5264 IEM_MC_END();
5265 break;
5266
5267 case IEMMODE_32BIT:
5268 IEM_MC_BEGIN(2, 4, IEM_MC_F_MIN_386);
5269 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5270 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
5271 IEM_MC_LOCAL(uint32_t, uTmpReg);
5272 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
5273 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1);
5274
5275 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5276 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5277 IEM_MC_MEM_MAP_U32_RW(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5278 IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
5279 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5280 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
5281 else
5282 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
5283 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Mem, bUnmapInfo);
5284 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);
5285
5286 IEM_MC_ADVANCE_RIP_AND_FINISH();
5287 IEM_MC_END();
5288 break;
5289
5290 case IEMMODE_64BIT:
5291 IEM_MC_BEGIN(2, 4, IEM_MC_F_64BIT);
5292 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5293 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
5294 IEM_MC_LOCAL(uint64_t, uTmpReg);
5295 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
5296 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1);
5297
5298 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5300 IEM_MC_MEM_MAP_U64_RW(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5301 IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
5302 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5303 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
5304 else
5305 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
5306 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Mem, bUnmapInfo);
5307 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);
5308
5309 IEM_MC_ADVANCE_RIP_AND_FINISH();
5310 IEM_MC_END();
5311 break;
5312
5313 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5314 }
5315 }
5316}
5317
5318
5319/**
5320 * @opcode 0x88
5321 */
5322FNIEMOP_DEF(iemOp_mov_Eb_Gb)
5323{
5324 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
5325
5326 uint8_t bRm;
5327 IEM_OPCODE_GET_NEXT_U8(&bRm);
5328
5329 /*
5330 * If rm is denoting a register, no more instruction bytes.
5331 */
5332 if (IEM_IS_MODRM_REG_MODE(bRm))
5333 {
5334 IEM_MC_BEGIN(0, 1, 0);
5335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5336 IEM_MC_LOCAL(uint8_t, u8Value);
5337 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5338 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
5339 IEM_MC_ADVANCE_RIP_AND_FINISH();
5340 IEM_MC_END();
5341 }
5342 else
5343 {
5344 /*
5345 * We're writing a register to memory.
5346 */
5347 IEM_MC_BEGIN(0, 2, 0);
5348 IEM_MC_LOCAL(uint8_t, u8Value);
5349 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5352 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5353 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
5354 IEM_MC_ADVANCE_RIP_AND_FINISH();
5355 IEM_MC_END();
5356 }
5357}
5358
5359
5360/**
5361 * @opcode 0x89
5362 */
5363FNIEMOP_DEF(iemOp_mov_Ev_Gv)
5364{
5365 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
5366
5367 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5368
5369 /*
5370 * If rm is denoting a register, no more instruction bytes.
5371 */
5372 if (IEM_IS_MODRM_REG_MODE(bRm))
5373 {
5374 switch (pVCpu->iem.s.enmEffOpSize)
5375 {
5376 case IEMMODE_16BIT:
5377 IEM_MC_BEGIN(0, 1, 0);
5378 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5379 IEM_MC_LOCAL(uint16_t, u16Value);
5380 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5381 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
5382 IEM_MC_ADVANCE_RIP_AND_FINISH();
5383 IEM_MC_END();
5384 break;
5385
5386 case IEMMODE_32BIT:
5387 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
5388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5389 IEM_MC_LOCAL(uint32_t, u32Value);
5390 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5391 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
5392 IEM_MC_ADVANCE_RIP_AND_FINISH();
5393 IEM_MC_END();
5394 break;
5395
5396 case IEMMODE_64BIT:
5397 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
5398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5399 IEM_MC_LOCAL(uint64_t, u64Value);
5400 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5401 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
5402 IEM_MC_ADVANCE_RIP_AND_FINISH();
5403 IEM_MC_END();
5404 break;
5405
5406 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5407 }
5408 }
5409 else
5410 {
5411 /*
5412 * We're writing a register to memory.
5413 */
5414 switch (pVCpu->iem.s.enmEffOpSize)
5415 {
5416 case IEMMODE_16BIT:
5417 IEM_MC_BEGIN(0, 2, 0);
5418 IEM_MC_LOCAL(uint16_t, u16Value);
5419 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5420 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5422 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5423 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
5424 IEM_MC_ADVANCE_RIP_AND_FINISH();
5425 IEM_MC_END();
5426 break;
5427
5428 case IEMMODE_32BIT:
5429 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386);
5430 IEM_MC_LOCAL(uint32_t, u32Value);
5431 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5434 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5435 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
5436 IEM_MC_ADVANCE_RIP_AND_FINISH();
5437 IEM_MC_END();
5438 break;
5439
5440 case IEMMODE_64BIT:
5441 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
5442 IEM_MC_LOCAL(uint64_t, u64Value);
5443 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5444 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5446 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5447 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
5448 IEM_MC_ADVANCE_RIP_AND_FINISH();
5449 IEM_MC_END();
5450 break;
5451
5452 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5453 }
5454 }
5455}
5456
5457
5458/**
5459 * @opcode 0x8a
5460 */
5461FNIEMOP_DEF(iemOp_mov_Gb_Eb)
5462{
5463 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
5464
5465 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5466
5467 /*
5468 * If rm is denoting a register, no more instruction bytes.
5469 */
5470 if (IEM_IS_MODRM_REG_MODE(bRm))
5471 {
5472 IEM_MC_BEGIN(0, 1, 0);
5473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5474 IEM_MC_LOCAL(uint8_t, u8Value);
5475 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5476 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
5477 IEM_MC_ADVANCE_RIP_AND_FINISH();
5478 IEM_MC_END();
5479 }
5480 else
5481 {
5482 /*
5483 * We're loading a register from memory.
5484 */
5485 IEM_MC_BEGIN(0, 2, 0);
5486 IEM_MC_LOCAL(uint8_t, u8Value);
5487 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5488 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5490 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5491 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
5492 IEM_MC_ADVANCE_RIP_AND_FINISH();
5493 IEM_MC_END();
5494 }
5495}
5496
5497
5498/**
5499 * @opcode 0x8b
5500 */
5501FNIEMOP_DEF(iemOp_mov_Gv_Ev)
5502{
5503 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
5504
5505 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5506
5507 /*
5508 * If rm is denoting a register, no more instruction bytes.
5509 */
5510 if (IEM_IS_MODRM_REG_MODE(bRm))
5511 {
5512 switch (pVCpu->iem.s.enmEffOpSize)
5513 {
5514 case IEMMODE_16BIT:
5515 IEM_MC_BEGIN(0, 1, 0);
5516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5517 IEM_MC_LOCAL(uint16_t, u16Value);
5518 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5519 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
5520 IEM_MC_ADVANCE_RIP_AND_FINISH();
5521 IEM_MC_END();
5522 break;
5523
5524 case IEMMODE_32BIT:
5525 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
5526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5527 IEM_MC_LOCAL(uint32_t, u32Value);
5528 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5529 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
5530 IEM_MC_ADVANCE_RIP_AND_FINISH();
5531 IEM_MC_END();
5532 break;
5533
5534 case IEMMODE_64BIT:
5535 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
5536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5537 IEM_MC_LOCAL(uint64_t, u64Value);
5538 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5539 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
5540 IEM_MC_ADVANCE_RIP_AND_FINISH();
5541 IEM_MC_END();
5542 break;
5543
5544 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5545 }
5546 }
5547 else
5548 {
5549 /*
5550 * We're loading a register from memory.
5551 */
5552 switch (pVCpu->iem.s.enmEffOpSize)
5553 {
5554 case IEMMODE_16BIT:
5555 IEM_MC_BEGIN(0, 2, 0);
5556 IEM_MC_LOCAL(uint16_t, u16Value);
5557 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5558 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5560 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5561 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
5562 IEM_MC_ADVANCE_RIP_AND_FINISH();
5563 IEM_MC_END();
5564 break;
5565
5566 case IEMMODE_32BIT:
5567 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386);
5568 IEM_MC_LOCAL(uint32_t, u32Value);
5569 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5570 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5572 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5573 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
5574 IEM_MC_ADVANCE_RIP_AND_FINISH();
5575 IEM_MC_END();
5576 break;
5577
5578 case IEMMODE_64BIT:
5579 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
5580 IEM_MC_LOCAL(uint64_t, u64Value);
5581 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5582 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5584 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5585 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
5586 IEM_MC_ADVANCE_RIP_AND_FINISH();
5587 IEM_MC_END();
5588 break;
5589
5590 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5591 }
5592 }
5593}
5594
5595
5596/**
5597 * opcode 0x63
5598 * @todo Table fixme
5599 */
5600FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
5601{
5602 if (!IEM_IS_64BIT_CODE(pVCpu))
5603 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
5604 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
5605 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
5606 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
5607}
5608
5609
5610/**
5611 * @opcode 0x8c
5612 */
5613FNIEMOP_DEF(iemOp_mov_Ev_Sw)
5614{
5615 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
5616
5617 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5618
5619 /*
5620 * Check that the destination register exists. The REX.R prefix is ignored.
5621 */
5622 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5623 if (iSegReg > X86_SREG_GS)
5624 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5625
5626 /*
5627 * If rm is denoting a register, no more instruction bytes.
5628 * In that case, the operand size is respected and the upper bits are
5629 * cleared (starting with some pentium).
5630 */
5631 if (IEM_IS_MODRM_REG_MODE(bRm))
5632 {
5633 switch (pVCpu->iem.s.enmEffOpSize)
5634 {
5635 case IEMMODE_16BIT:
5636 IEM_MC_BEGIN(0, 1, 0);
5637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5638 IEM_MC_LOCAL(uint16_t, u16Value);
5639 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5640 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
5641 IEM_MC_ADVANCE_RIP_AND_FINISH();
5642 IEM_MC_END();
5643 break;
5644
5645 case IEMMODE_32BIT:
5646 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
5647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5648 IEM_MC_LOCAL(uint32_t, u32Value);
5649 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
5650 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
5651 IEM_MC_ADVANCE_RIP_AND_FINISH();
5652 IEM_MC_END();
5653 break;
5654
5655 case IEMMODE_64BIT:
5656 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
5657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5658 IEM_MC_LOCAL(uint64_t, u64Value);
5659 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
5660 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
5661 IEM_MC_ADVANCE_RIP_AND_FINISH();
5662 IEM_MC_END();
5663 break;
5664
5665 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5666 }
5667 }
5668 else
5669 {
5670 /*
5671 * We're saving the register to memory. The access is word sized
5672 * regardless of operand size prefixes.
5673 */
5674#if 0 /* not necessary */
5675 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5676#endif
5677 IEM_MC_BEGIN(0, 2, 0);
5678 IEM_MC_LOCAL(uint16_t, u16Value);
5679 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5680 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5682 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5683 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
5684 IEM_MC_ADVANCE_RIP_AND_FINISH();
5685 IEM_MC_END();
5686 }
5687}
5688
5689
5690
5691
5692/**
5693 * @opcode 0x8d
5694 */
5695FNIEMOP_DEF(iemOp_lea_Gv_M)
5696{
5697 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
5698 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5699 if (IEM_IS_MODRM_REG_MODE(bRm))
5700 IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */
5701
5702 switch (pVCpu->iem.s.enmEffOpSize)
5703 {
5704 case IEMMODE_16BIT:
5705 IEM_MC_BEGIN(0, 2, 0);
5706 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5707 IEM_MC_LOCAL(uint16_t, u16Cast);
5708 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5710 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
5711 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
5712 IEM_MC_ADVANCE_RIP_AND_FINISH();
5713 IEM_MC_END();
5714 break;
5715
5716 case IEMMODE_32BIT:
5717 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386);
5718 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5719 IEM_MC_LOCAL(uint32_t, u32Cast);
5720 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5722 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
5723 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
5724 IEM_MC_ADVANCE_RIP_AND_FINISH();
5725 IEM_MC_END();
5726 break;
5727
5728 case IEMMODE_64BIT:
5729 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
5730 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5731 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5733 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
5734 IEM_MC_ADVANCE_RIP_AND_FINISH();
5735 IEM_MC_END();
5736 break;
5737
5738 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5739 }
5740}
5741
5742
5743/**
5744 * @opcode 0x8e
5745 */
5746FNIEMOP_DEF(iemOp_mov_Sw_Ev)
5747{
5748 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
5749
5750 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5751
5752 /*
5753 * The practical operand size is 16-bit.
5754 */
5755#if 0 /* not necessary */
5756 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5757#endif
5758
5759 /*
5760 * Check that the destination register exists and can be used with this
5761 * instruction. The REX.R prefix is ignored.
5762 */
5763 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5764 /** @todo r=bird: What does 8086 do here wrt CS? */
5765 if ( iSegReg == X86_SREG_CS
5766 || iSegReg > X86_SREG_GS)
5767 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5768
5769 /*
5770 * If rm is denoting a register, no more instruction bytes.
5771 */
5772 if (IEM_IS_MODRM_REG_MODE(bRm))
5773 {
5774 IEM_MC_BEGIN(2, 0, 0);
5775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5776 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
5777 IEM_MC_ARG(uint16_t, u16Value, 1);
5778 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5779 if (iSRegArg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
5780 IEM_MC_CALL_CIMPL_2( 0, iemCImpl_load_SReg, iSRegArg, u16Value);
5781 else
5782 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE, iemCImpl_load_SReg, iSRegArg, u16Value);
5783 IEM_MC_END();
5784 }
5785 else
5786 {
5787 /*
5788 * We're loading the register from memory. The access is word sized
5789 * regardless of operand size prefixes.
5790 */
5791 IEM_MC_BEGIN(2, 1, 0);
5792 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
5793 IEM_MC_ARG(uint16_t, u16Value, 1);
5794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5795 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5797 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5798 if (iSRegArg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
5799 IEM_MC_CALL_CIMPL_2( 0, iemCImpl_load_SReg, iSRegArg, u16Value);
5800 else
5801 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE, iemCImpl_load_SReg, iSRegArg, u16Value);
5802 IEM_MC_END();
5803 }
5804}
5805
5806
/** Opcode 0x8f /0. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* The (cb << 8) argument to IEM_MC_CALC_RM_EFF_ADDR biases rSP by
               the popped size during the address calculation (see above). */
            IEM_MC_BEGIN(2, 0, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
    /* Disabled reference implementation: direct interpreter code that pops via
       a temporary RSP and only commits it on success. */
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR     GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
5941
5942
5943/**
5944 * @opcode 0x8f
5945 */
5946FNIEMOP_DEF(iemOp_Grp1A__xop)
5947{
5948 /*
5949 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
5950 * three byte VEX prefix, except that the mmmmm field cannot have the values
5951 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
5952 */
5953 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5954 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
5955 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
5956
5957 IEMOP_MNEMONIC(xop, "xop");
5958 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
5959 {
5960 /** @todo Test when exctly the XOP conformance checks kick in during
5961 * instruction decoding and fetching (using \#PF). */
5962 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
5963 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
5964 if ( ( pVCpu->iem.s.fPrefixes
5965 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
5966 == 0)
5967 {
5968 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
5969 if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
5970 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
5971 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
5972 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
5973 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
5974 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
5975 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
5976 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
5977
5978 /** @todo XOP: Just use new tables and decoders. */
5979 switch (bRm & 0x1f)
5980 {
5981 case 8: /* xop opcode map 8. */
5982 IEMOP_BITCH_ABOUT_STUB();
5983 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5984
5985 case 9: /* xop opcode map 9. */
5986 IEMOP_BITCH_ABOUT_STUB();
5987 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5988
5989 case 10: /* xop opcode map 10. */
5990 IEMOP_BITCH_ABOUT_STUB();
5991 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5992
5993 default:
5994 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
5995 IEMOP_RAISE_INVALID_OPCODE_RET();
5996 }
5997 }
5998 else
5999 Log(("XOP: Invalid prefix mix!\n"));
6000 }
6001 else
6002 Log(("XOP: XOP support disabled!\n"));
6003 IEMOP_RAISE_INVALID_OPCODE_RET();
6004}
6005
6006
6007/**
6008 * Common 'xchg reg,rAX' helper.
6009 */
6010FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
6011{
6012 iReg |= pVCpu->iem.s.uRexB;
6013 switch (pVCpu->iem.s.enmEffOpSize)
6014 {
6015 case IEMMODE_16BIT:
6016 IEM_MC_BEGIN(0, 2, 0);
6017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6018 IEM_MC_LOCAL(uint16_t, u16Tmp1);
6019 IEM_MC_LOCAL(uint16_t, u16Tmp2);
6020 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
6021 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
6022 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
6023 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
6024 IEM_MC_ADVANCE_RIP_AND_FINISH();
6025 IEM_MC_END();
6026 break;
6027
6028 case IEMMODE_32BIT:
6029 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386);
6030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6031 IEM_MC_LOCAL(uint32_t, u32Tmp1);
6032 IEM_MC_LOCAL(uint32_t, u32Tmp2);
6033 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
6034 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
6035 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
6036 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
6037 IEM_MC_ADVANCE_RIP_AND_FINISH();
6038 IEM_MC_END();
6039 break;
6040
6041 case IEMMODE_64BIT:
6042 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
6043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6044 IEM_MC_LOCAL(uint64_t, u64Tmp1);
6045 IEM_MC_LOCAL(uint64_t, u64Tmp2);
6046 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
6047 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
6048 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
6049 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
6050 IEM_MC_ADVANCE_RIP_AND_FINISH();
6051 IEM_MC_END();
6052 break;
6053
6054 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6055 }
6056}
6057
6058
6059/**
6060 * @opcode 0x90
6061 */
6062FNIEMOP_DEF(iemOp_nop)
6063{
6064 /* R8/R8D and RAX/EAX can be exchanged. */
6065 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
6066 {
6067 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
6068 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
6069 }
6070
6071 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
6072 {
6073 IEMOP_MNEMONIC(pause, "pause");
6074 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
6075 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
6076 if (!IEM_IS_IN_GUEST(pVCpu))
6077 { /* probable */ }
6078#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6079 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
6080 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmx_pause);
6081#endif
6082#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
6083 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
6084 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_svm_pause);
6085#endif
6086 }
6087 else
6088 IEMOP_MNEMONIC(nop, "nop");
6089 /** @todo testcase: lock nop; lock pause */
6090 IEM_MC_BEGIN(0, 0, 0);
6091 IEMOP_HLP_DONE_DECODING();
6092 IEM_MC_ADVANCE_RIP_AND_FINISH();
6093 IEM_MC_END();
6094}
6095
6096
6097/**
6098 * @opcode 0x91
6099 */
6100FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
6101{
6102 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
6103 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
6104}
6105
6106
6107/**
6108 * @opcode 0x92
6109 */
6110FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
6111{
6112 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
6113 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
6114}
6115
6116
6117/**
6118 * @opcode 0x93
6119 */
6120FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
6121{
6122 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
6123 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
6124}
6125
6126
6127/**
6128 * @opcode 0x94
6129 */
6130FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6131{
6132 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6133 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6134}
6135
6136
6137/**
6138 * @opcode 0x95
6139 */
6140FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
6141{
6142 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
6143 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
6144}
6145
6146
6147/**
6148 * @opcode 0x96
6149 */
6150FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
6151{
6152 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
6153 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
6154}
6155
6156
6157/**
6158 * @opcode 0x97
6159 */
6160FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
6161{
6162 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
6163 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
6164}
6165
6166
6167/**
6168 * @opcode 0x98
6169 */
6170FNIEMOP_DEF(iemOp_cbw)
6171{
6172 switch (pVCpu->iem.s.enmEffOpSize)
6173 {
6174 case IEMMODE_16BIT:
6175 IEMOP_MNEMONIC(cbw, "cbw");
6176 IEM_MC_BEGIN(0, 1, 0);
6177 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6178 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
6179 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
6180 } IEM_MC_ELSE() {
6181 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
6182 } IEM_MC_ENDIF();
6183 IEM_MC_ADVANCE_RIP_AND_FINISH();
6184 IEM_MC_END();
6185 break;
6186
6187 case IEMMODE_32BIT:
6188 IEMOP_MNEMONIC(cwde, "cwde");
6189 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
6190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6191 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6192 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
6193 } IEM_MC_ELSE() {
6194 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
6195 } IEM_MC_ENDIF();
6196 IEM_MC_ADVANCE_RIP_AND_FINISH();
6197 IEM_MC_END();
6198 break;
6199
6200 case IEMMODE_64BIT:
6201 IEMOP_MNEMONIC(cdqe, "cdqe");
6202 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
6203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6204 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6205 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
6206 } IEM_MC_ELSE() {
6207 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
6208 } IEM_MC_ENDIF();
6209 IEM_MC_ADVANCE_RIP_AND_FINISH();
6210 IEM_MC_END();
6211 break;
6212
6213 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6214 }
6215}
6216
6217
6218/**
6219 * @opcode 0x99
6220 */
6221FNIEMOP_DEF(iemOp_cwd)
6222{
6223 switch (pVCpu->iem.s.enmEffOpSize)
6224 {
6225 case IEMMODE_16BIT:
6226 IEMOP_MNEMONIC(cwd, "cwd");
6227 IEM_MC_BEGIN(0, 1, 0);
6228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6229 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6230 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
6231 } IEM_MC_ELSE() {
6232 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
6233 } IEM_MC_ENDIF();
6234 IEM_MC_ADVANCE_RIP_AND_FINISH();
6235 IEM_MC_END();
6236 break;
6237
6238 case IEMMODE_32BIT:
6239 IEMOP_MNEMONIC(cdq, "cdq");
6240 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
6241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6242 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6243 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
6244 } IEM_MC_ELSE() {
6245 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
6246 } IEM_MC_ENDIF();
6247 IEM_MC_ADVANCE_RIP_AND_FINISH();
6248 IEM_MC_END();
6249 break;
6250
6251 case IEMMODE_64BIT:
6252 IEMOP_MNEMONIC(cqo, "cqo");
6253 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
6254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6255 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
6256 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
6257 } IEM_MC_ELSE() {
6258 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
6259 } IEM_MC_ENDIF();
6260 IEM_MC_ADVANCE_RIP_AND_FINISH();
6261 IEM_MC_END();
6262 break;
6263
6264 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6265 }
6266}
6267
6268
6269/**
6270 * @opcode 0x9a
6271 */
6272FNIEMOP_DEF(iemOp_call_Ap)
6273{
6274 IEMOP_MNEMONIC(call_Ap, "call Ap");
6275 IEMOP_HLP_NO_64BIT();
6276
6277 /* Decode the far pointer address and pass it on to the far call C implementation. */
6278 uint32_t off32Seg;
6279 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
6280 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
6281 else
6282 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
6283 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
6284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6285 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
6286 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
6287 iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
6288}
6289
6290
/** Opcode 0x9b. (aka fwait)
 * Checks for pending FPU exceptions / device-not-available before advancing. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6302
6303
6304/**
6305 * @opcode 0x9c
6306 */
6307FNIEMOP_DEF(iemOp_pushf_Fv)
6308{
6309 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
6310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6311 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6312 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
6313}
6314
6315
6316/**
6317 * @opcode 0x9d
6318 */
6319FNIEMOP_DEF(iemOp_popf_Fv)
6320{
6321 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
6322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6323 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6324 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
6325 iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
6326}
6327
6328
6329/**
6330 * @opcode 0x9e
6331 */
6332FNIEMOP_DEF(iemOp_sahf)
6333{
6334 IEMOP_MNEMONIC(sahf, "sahf");
6335 if ( IEM_IS_64BIT_CODE(pVCpu)
6336 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6337 IEMOP_RAISE_INVALID_OPCODE_RET();
6338 IEM_MC_BEGIN(0, 2, 0);
6339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6340 IEM_MC_LOCAL(uint32_t, u32Flags);
6341 IEM_MC_LOCAL(uint32_t, EFlags);
6342 IEM_MC_FETCH_EFLAGS(EFlags);
6343 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
6344 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6345 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
6346 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
6347 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
6348 IEM_MC_COMMIT_EFLAGS(EFlags);
6349 IEM_MC_ADVANCE_RIP_AND_FINISH();
6350 IEM_MC_END();
6351}
6352
6353
6354/**
6355 * @opcode 0x9f
6356 */
6357FNIEMOP_DEF(iemOp_lahf)
6358{
6359 IEMOP_MNEMONIC(lahf, "lahf");
6360 if ( IEM_IS_64BIT_CODE(pVCpu)
6361 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6362 IEMOP_RAISE_INVALID_OPCODE_RET();
6363 IEM_MC_BEGIN(0, 1, 0);
6364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6365 IEM_MC_LOCAL(uint8_t, u8Flags);
6366 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
6367 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
6368 IEM_MC_ADVANCE_RIP_AND_FINISH();
6369 IEM_MC_END();
6370}
6371
6372
6373/**
6374 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
6375 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
6376 * Will return/throw on failures.
6377 * @param a_GCPtrMemOff The variable to store the offset in.
6378 */
6379#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
6380 do \
6381 { \
6382 switch (pVCpu->iem.s.enmEffAddrMode) \
6383 { \
6384 case IEMMODE_16BIT: \
6385 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
6386 break; \
6387 case IEMMODE_32BIT: \
6388 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
6389 break; \
6390 case IEMMODE_64BIT: \
6391 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
6392 break; \
6393 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
6394 } \
6395 } while (0)
6396
6397/**
6398 * @opcode 0xa0
6399 */
6400FNIEMOP_DEF(iemOp_mov_AL_Ob)
6401{
6402 /*
6403 * Get the offset.
6404 */
6405 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
6406 RTGCPTR GCPtrMemOff;
6407 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6408
6409 /*
6410 * Fetch AL.
6411 */
6412 IEM_MC_BEGIN(0, 1, 0);
6413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6414 IEM_MC_LOCAL(uint8_t, u8Tmp);
6415 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6416 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
6417 IEM_MC_ADVANCE_RIP_AND_FINISH();
6418 IEM_MC_END();
6419}
6420
6421
6422/**
6423 * @opcode 0xa1
6424 */
6425FNIEMOP_DEF(iemOp_mov_rAX_Ov)
6426{
6427 /*
6428 * Get the offset.
6429 */
6430 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
6431 RTGCPTR GCPtrMemOff;
6432 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6433
6434 /*
6435 * Fetch rAX.
6436 */
6437 switch (pVCpu->iem.s.enmEffOpSize)
6438 {
6439 case IEMMODE_16BIT:
6440 IEM_MC_BEGIN(0, 1, 0);
6441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6442 IEM_MC_LOCAL(uint16_t, u16Tmp);
6443 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6444 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
6445 IEM_MC_ADVANCE_RIP_AND_FINISH();
6446 IEM_MC_END();
6447 break;
6448
6449 case IEMMODE_32BIT:
6450 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
6451 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6452 IEM_MC_LOCAL(uint32_t, u32Tmp);
6453 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6454 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
6455 IEM_MC_ADVANCE_RIP_AND_FINISH();
6456 IEM_MC_END();
6457 break;
6458
6459 case IEMMODE_64BIT:
6460 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
6461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6462 IEM_MC_LOCAL(uint64_t, u64Tmp);
6463 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6464 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
6465 IEM_MC_ADVANCE_RIP_AND_FINISH();
6466 IEM_MC_END();
6467 break;
6468
6469 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6470 }
6471}
6472
6473
6474/**
6475 * @opcode 0xa2
6476 */
6477FNIEMOP_DEF(iemOp_mov_Ob_AL)
6478{
6479 /*
6480 * Get the offset.
6481 */
6482 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
6483 RTGCPTR GCPtrMemOff;
6484 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6485
6486 /*
6487 * Store AL.
6488 */
6489 IEM_MC_BEGIN(0, 1, 0);
6490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6491 IEM_MC_LOCAL(uint8_t, u8Tmp);
6492 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
6493 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
6494 IEM_MC_ADVANCE_RIP_AND_FINISH();
6495 IEM_MC_END();
6496}
6497
6498
6499/**
6500 * @opcode 0xa3
6501 */
6502FNIEMOP_DEF(iemOp_mov_Ov_rAX)
6503{
6504 /*
6505 * Get the offset.
6506 */
6507 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
6508 RTGCPTR GCPtrMemOff;
6509 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6510
6511 /*
6512 * Store rAX.
6513 */
6514 switch (pVCpu->iem.s.enmEffOpSize)
6515 {
6516 case IEMMODE_16BIT:
6517 IEM_MC_BEGIN(0, 1, 0);
6518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6519 IEM_MC_LOCAL(uint16_t, u16Tmp);
6520 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
6521 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
6522 IEM_MC_ADVANCE_RIP_AND_FINISH();
6523 IEM_MC_END();
6524 break;
6525
6526 case IEMMODE_32BIT:
6527 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
6528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6529 IEM_MC_LOCAL(uint32_t, u32Tmp);
6530 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
6531 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
6532 IEM_MC_ADVANCE_RIP_AND_FINISH();
6533 IEM_MC_END();
6534 break;
6535
6536 case IEMMODE_64BIT:
6537 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
6538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6539 IEM_MC_LOCAL(uint64_t, u64Tmp);
6540 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
6541 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
6542 IEM_MC_ADVANCE_RIP_AND_FINISH();
6543 IEM_MC_END();
6544 break;
6545
6546 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6547 }
6548}
6549
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Emits one MOVS case: load from DS(eff-seg-overridable):xSI, store to ES:xDI,
 * then advance or rewind both index registers by the element size depending on
 * EFLAGS.DF.
 * @param ValBits    Element width in bits (8/16/32/64).
 * @param AddrBits   Address-register width in bits (16/32/64).
 * @param a_fMcFlags IEM_MC_F_XXX mode restriction flags for IEM_MC_BEGIN. */
#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6569
6570/**
6571 * @opcode 0xa4
6572 */
6573FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
6574{
6575 /*
6576 * Use the C implementation if a repeat prefix is encountered.
6577 */
6578 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6579 {
6580 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
6581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6582 switch (pVCpu->iem.s.enmEffAddrMode)
6583 {
6584 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
6585 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
6586 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
6587 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6588 }
6589 }
6590
6591 /*
6592 * Sharing case implementation with movs[wdq] below.
6593 */
6594 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
6595 switch (pVCpu->iem.s.enmEffAddrMode)
6596 {
6597 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
6598 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
6599 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
6600 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6601 }
6602}
6603
6604
6605/**
6606 * @opcode 0xa5
6607 */
6608FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
6609{
6610
6611 /*
6612 * Use the C implementation if a repeat prefix is encountered.
6613 */
6614 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6615 {
6616 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
6617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6618 switch (pVCpu->iem.s.enmEffOpSize)
6619 {
6620 case IEMMODE_16BIT:
6621 switch (pVCpu->iem.s.enmEffAddrMode)
6622 {
6623 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
6624 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
6625 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
6626 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6627 }
6628 break;
6629 case IEMMODE_32BIT:
6630 switch (pVCpu->iem.s.enmEffAddrMode)
6631 {
6632 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
6633 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
6634 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
6635 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6636 }
6637 case IEMMODE_64BIT:
6638 switch (pVCpu->iem.s.enmEffAddrMode)
6639 {
6640 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
6641 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
6642 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
6643 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6644 }
6645 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6646 }
6647 }
6648
6649 /*
6650 * Annoying double switch here.
6651 * Using ugly macro for implementing the cases, sharing it with movsb.
6652 */
6653 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
6654 switch (pVCpu->iem.s.enmEffOpSize)
6655 {
6656 case IEMMODE_16BIT:
6657 switch (pVCpu->iem.s.enmEffAddrMode)
6658 {
6659 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
6660 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
6661 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
6662 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6663 }
6664 break;
6665
6666 case IEMMODE_32BIT:
6667 switch (pVCpu->iem.s.enmEffAddrMode)
6668 {
6669 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
6670 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
6671 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
6672 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6673 }
6674 break;
6675
6676 case IEMMODE_64BIT:
6677 switch (pVCpu->iem.s.enmEffAddrMode)
6678 {
6679 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6680 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
6681 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
6682 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6683 }
6684 break;
6685 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6686 }
6687}
6688
6689#undef IEM_MOVS_CASE
6690
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 * Emits one CMPS case: compare the element at DS(eff-seg-overridable):xSI
 * against the one at ES:xDI via the cmp assembly helper (updates EFLAGS),
 * then advance or rewind both index registers per EFLAGS.DF.
 * @param ValBits    Element width in bits (8/16/32/64).
 * @param AddrBits   Address-register width in bits (16/32/64).
 * @param a_fMcFlags IEM_MC_F_XXX mode restriction flags for IEM_MC_BEGIN. */
#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(3, 3, a_fMcFlags); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6718
6719/**
6720 * @opcode 0xa6
6721 */
6722FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
6723{
6724
6725 /*
6726 * Use the C implementation if a repeat prefix is encountered.
6727 */
6728 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6729 {
6730 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
6731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6732 switch (pVCpu->iem.s.enmEffAddrMode)
6733 {
6734 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6735 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6736 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6737 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6738 }
6739 }
6740 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6741 {
6742 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
6743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6744 switch (pVCpu->iem.s.enmEffAddrMode)
6745 {
6746 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6747 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6748 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6749 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6750 }
6751 }
6752
6753 /*
6754 * Sharing case implementation with cmps[wdq] below.
6755 */
6756 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
6757 switch (pVCpu->iem.s.enmEffAddrMode)
6758 {
6759 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
6760 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
6761 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
6762 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6763 }
6764}
6765
6766
6767/**
6768 * @opcode 0xa7
6769 */
6770FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
6771{
6772 /*
6773 * Use the C implementation if a repeat prefix is encountered.
6774 */
6775 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6776 {
6777 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
6778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6779 switch (pVCpu->iem.s.enmEffOpSize)
6780 {
6781 case IEMMODE_16BIT:
6782 switch (pVCpu->iem.s.enmEffAddrMode)
6783 {
6784 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6785 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6786 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6787 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6788 }
6789 break;
6790 case IEMMODE_32BIT:
6791 switch (pVCpu->iem.s.enmEffAddrMode)
6792 {
6793 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6794 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6795 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6796 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6797 }
6798 case IEMMODE_64BIT:
6799 switch (pVCpu->iem.s.enmEffAddrMode)
6800 {
6801 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
6802 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6803 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6804 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6805 }
6806 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6807 }
6808 }
6809
6810 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6811 {
6812 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
6813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6814 switch (pVCpu->iem.s.enmEffOpSize)
6815 {
6816 case IEMMODE_16BIT:
6817 switch (pVCpu->iem.s.enmEffAddrMode)
6818 {
6819 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6820 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6821 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6822 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6823 }
6824 break;
6825 case IEMMODE_32BIT:
6826 switch (pVCpu->iem.s.enmEffAddrMode)
6827 {
6828 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6829 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6830 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6831 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6832 }
6833 case IEMMODE_64BIT:
6834 switch (pVCpu->iem.s.enmEffAddrMode)
6835 {
6836 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
6837 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6838 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6839 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6840 }
6841 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6842 }
6843 }
6844
6845 /*
6846 * Annoying double switch here.
6847 * Using ugly macro for implementing the cases, sharing it with cmpsb.
6848 */
6849 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
6850 switch (pVCpu->iem.s.enmEffOpSize)
6851 {
6852 case IEMMODE_16BIT:
6853 switch (pVCpu->iem.s.enmEffAddrMode)
6854 {
6855 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
6856 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
6857 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
6858 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6859 }
6860 break;
6861
6862 case IEMMODE_32BIT:
6863 switch (pVCpu->iem.s.enmEffAddrMode)
6864 {
6865 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
6866 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
6867 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
6868 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6869 }
6870 break;
6871
6872 case IEMMODE_64BIT:
6873 switch (pVCpu->iem.s.enmEffAddrMode)
6874 {
6875 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6876 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break;
6877 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
6878 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6879 }
6880 break;
6881 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6882 }
6883}
6884
6885#undef IEM_CMPS_CASE
6886
6887/**
6888 * @opcode 0xa8
6889 */
6890FNIEMOP_DEF(iemOp_test_AL_Ib)
6891{
6892 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
6893 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6894 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
6895}
6896
6897
6898/**
6899 * @opcode 0xa9
6900 */
6901FNIEMOP_DEF(iemOp_test_eAX_Iz)
6902{
6903 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
6904 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6905 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
6906}
6907
6908
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 * Emits one STOS case: store rAX (at @a ValBits width) to ES:xDI and advance
 * or rewind xDI by the element size depending on EFLAGS.DF.
 * @param ValBits    Element width in bits (8/16/32/64).
 * @param AddrBits   Address-register width in bits (16/32/64).
 * @param a_fMcFlags IEM_MC_F_XXX mode restriction flags for IEM_MC_BEGIN. */
#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6925
6926/**
6927 * @opcode 0xaa
6928 */
6929FNIEMOP_DEF(iemOp_stosb_Yb_AL)
6930{
6931 /*
6932 * Use the C implementation if a repeat prefix is encountered.
6933 */
6934 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6935 {
6936 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
6937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6938 switch (pVCpu->iem.s.enmEffAddrMode)
6939 {
6940 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m16);
6941 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m32);
6942 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m64);
6943 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6944 }
6945 }
6946
6947 /*
6948 * Sharing case implementation with stos[wdq] below.
6949 */
6950 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
6951 switch (pVCpu->iem.s.enmEffAddrMode)
6952 {
6953 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
6954 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
6955 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
6956 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6957 }
6958}
6959
6960
6961/**
6962 * @opcode 0xab
6963 */
6964FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
6965{
6966 /*
6967 * Use the C implementation if a repeat prefix is encountered.
6968 */
6969 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6970 {
6971 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
6972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6973 switch (pVCpu->iem.s.enmEffOpSize)
6974 {
6975 case IEMMODE_16BIT:
6976 switch (pVCpu->iem.s.enmEffAddrMode)
6977 {
6978 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m16);
6979 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m32);
6980 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m64);
6981 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6982 }
6983 break;
6984 case IEMMODE_32BIT:
6985 switch (pVCpu->iem.s.enmEffAddrMode)
6986 {
6987 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m16);
6988 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m32);
6989 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m64);
6990 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6991 }
6992 case IEMMODE_64BIT:
6993 switch (pVCpu->iem.s.enmEffAddrMode)
6994 {
6995 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
6996 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m32);
6997 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m64);
6998 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6999 }
7000 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7001 }
7002 }
7003
7004 /*
7005 * Annoying double switch here.
7006 * Using ugly macro for implementing the cases, sharing it with stosb.
7007 */
7008 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7009 switch (pVCpu->iem.s.enmEffOpSize)
7010 {
7011 case IEMMODE_16BIT:
7012 switch (pVCpu->iem.s.enmEffAddrMode)
7013 {
7014 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7015 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7016 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
7017 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7018 }
7019 break;
7020
7021 case IEMMODE_32BIT:
7022 switch (pVCpu->iem.s.enmEffAddrMode)
7023 {
7024 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7025 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7026 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
7027 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7028 }
7029 break;
7030
7031 case IEMMODE_64BIT:
7032 switch (pVCpu->iem.s.enmEffAddrMode)
7033 {
7034 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7035 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
7036 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
7037 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7038 }
7039 break;
7040 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7041 }
7042}
7043
7044#undef IEM_STOS_CASE
7045
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the body of one non-repeating LODS iteration: read the value at
 * DS(seg):xSI into rAX, then step xSI up or down by the operand size
 * depending on EFLAGS.DF.
 *
 * @param   ValBits     Operand size in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address size in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX mode/CPU restrictions for IEM_MC_BEGIN.
 */
#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7062
/**
 * @opcode 0xac
 *
 * LODSB - load AL from [seg:xSI] and advance xSI.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (Both REP and REPNE are treated as plain REP here.)
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7096
7097
7098/**
7099 * @opcode 0xad
7100 */
7101FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
7102{
7103 /*
7104 * Use the C implementation if a repeat prefix is encountered.
7105 */
7106 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7107 {
7108 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
7109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7110 switch (pVCpu->iem.s.enmEffOpSize)
7111 {
7112 case IEMMODE_16BIT:
7113 switch (pVCpu->iem.s.enmEffAddrMode)
7114 {
7115 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
7116 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
7117 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
7118 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7119 }
7120 break;
7121 case IEMMODE_32BIT:
7122 switch (pVCpu->iem.s.enmEffAddrMode)
7123 {
7124 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
7125 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
7126 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
7127 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7128 }
7129 case IEMMODE_64BIT:
7130 switch (pVCpu->iem.s.enmEffAddrMode)
7131 {
7132 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
7133 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
7134 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
7135 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7136 }
7137 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7138 }
7139 }
7140
7141 /*
7142 * Annoying double switch here.
7143 * Using ugly macro for implementing the cases, sharing it with lodsb.
7144 */
7145 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
7146 switch (pVCpu->iem.s.enmEffOpSize)
7147 {
7148 case IEMMODE_16BIT:
7149 switch (pVCpu->iem.s.enmEffAddrMode)
7150 {
7151 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7152 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7153 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
7154 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7155 }
7156 break;
7157
7158 case IEMMODE_32BIT:
7159 switch (pVCpu->iem.s.enmEffAddrMode)
7160 {
7161 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7162 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7163 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
7164 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7165 }
7166 break;
7167
7168 case IEMMODE_64BIT:
7169 switch (pVCpu->iem.s.enmEffAddrMode)
7170 {
7171 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7172 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
7173 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
7174 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7175 }
7176 break;
7177 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7178 }
7179}
7180
7181#undef IEM_LODS_CASE
7182
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the body of one non-repeating SCAS iteration: compare rAX against the
 * value at ES:xDI (via iemAImpl_cmp_uNN, updating flags only), then step xDI
 * up or down by the operand size depending on EFLAGS.DF.
 *
 * @param   ValBits     Operand size in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address size in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX mode/CPU restrictions for IEM_MC_BEGIN.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 2, a_fMcFlags); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
7205
/**
 * @opcode 0xae
 *
 * SCASB - compare AL with the byte at ES:xDI and advance xDI.  Unlike
 * STOS/LODS, the REPE and REPNE prefixes have distinct meanings here.
 */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        /* Note: the stats symbol reads 'repone' - presumably a typo for 'repne'; kept as-is. */
        IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7251
7252
7253/**
7254 * @opcode 0xaf
7255 */
7256FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
7257{
7258 /*
7259 * Use the C implementation if a repeat prefix is encountered.
7260 */
7261 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7262 {
7263 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
7264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7265 switch (pVCpu->iem.s.enmEffOpSize)
7266 {
7267 case IEMMODE_16BIT:
7268 switch (pVCpu->iem.s.enmEffAddrMode)
7269 {
7270 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m16);
7271 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m32);
7272 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m64);
7273 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7274 }
7275 break;
7276 case IEMMODE_32BIT:
7277 switch (pVCpu->iem.s.enmEffAddrMode)
7278 {
7279 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m16);
7280 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m32);
7281 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m64);
7282 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7283 }
7284 case IEMMODE_64BIT:
7285 switch (pVCpu->iem.s.enmEffAddrMode)
7286 {
7287 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
7288 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m32);
7289 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m64);
7290 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7291 }
7292 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7293 }
7294 }
7295 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7296 {
7297 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
7298 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7299 switch (pVCpu->iem.s.enmEffOpSize)
7300 {
7301 case IEMMODE_16BIT:
7302 switch (pVCpu->iem.s.enmEffAddrMode)
7303 {
7304 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m16);
7305 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m32);
7306 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m64);
7307 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7308 }
7309 break;
7310 case IEMMODE_32BIT:
7311 switch (pVCpu->iem.s.enmEffAddrMode)
7312 {
7313 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m16);
7314 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m32);
7315 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m64);
7316 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7317 }
7318 case IEMMODE_64BIT:
7319 switch (pVCpu->iem.s.enmEffAddrMode)
7320 {
7321 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
7322 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m32);
7323 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m64);
7324 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7325 }
7326 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7327 }
7328 }
7329
7330 /*
7331 * Annoying double switch here.
7332 * Using ugly macro for implementing the cases, sharing it with scasb.
7333 */
7334 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
7335 switch (pVCpu->iem.s.enmEffOpSize)
7336 {
7337 case IEMMODE_16BIT:
7338 switch (pVCpu->iem.s.enmEffAddrMode)
7339 {
7340 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7341 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7342 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
7343 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7344 }
7345 break;
7346
7347 case IEMMODE_32BIT:
7348 switch (pVCpu->iem.s.enmEffAddrMode)
7349 {
7350 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7351 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7352 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
7353 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7354 }
7355 break;
7356
7357 case IEMMODE_64BIT:
7358 switch (pVCpu->iem.s.enmEffAddrMode)
7359 {
7360 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7361 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
7362 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
7363 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7364 }
7365 break;
7366 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7367 }
7368}
7369
7370#undef IEM_SCAS_CASE
7371
7372/**
7373 * Common 'mov r8, imm8' helper.
7374 */
7375FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
7376{
7377 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7378 IEM_MC_BEGIN(0, 1, 0);
7379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7380 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
7381 IEM_MC_STORE_GREG_U8(iFixedReg, u8Value);
7382 IEM_MC_ADVANCE_RIP_AND_FINISH();
7383 IEM_MC_END();
7384}
7385
7386
7387/**
7388 * @opcode 0xb0
7389 */
7390FNIEMOP_DEF(iemOp_mov_AL_Ib)
7391{
7392 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
7393 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7394}
7395
7396
7397/**
7398 * @opcode 0xb1
7399 */
7400FNIEMOP_DEF(iemOp_CL_Ib)
7401{
7402 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
7403 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7404}
7405
7406
7407/**
7408 * @opcode 0xb2
7409 */
7410FNIEMOP_DEF(iemOp_DL_Ib)
7411{
7412 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
7413 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7414}
7415
7416
7417/**
7418 * @opcode 0xb3
7419 */
7420FNIEMOP_DEF(iemOp_BL_Ib)
7421{
7422 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
7423 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7424}
7425
7426
7427/**
7428 * @opcode 0xb4
7429 */
7430FNIEMOP_DEF(iemOp_mov_AH_Ib)
7431{
7432 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
7433 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7434}
7435
7436
7437/**
7438 * @opcode 0xb5
7439 */
7440FNIEMOP_DEF(iemOp_CH_Ib)
7441{
7442 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
7443 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7444}
7445
7446
7447/**
7448 * @opcode 0xb6
7449 */
7450FNIEMOP_DEF(iemOp_DH_Ib)
7451{
7452 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
7453 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7454}
7455
7456
7457/**
7458 * @opcode 0xb7
7459 */
7460FNIEMOP_DEF(iemOp_BH_Ib)
7461{
7462 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
7463 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7464}
7465
7466
/**
 * Common 'mov regX,immX' helper for opcodes 0xb8 thru 0xbf.
 *
 * Fetches an operand-size immediate (a full 64-bit one with REX.W!) and
 * stores it into the given general purpose register.
 *
 * @param   iFixedReg   Destination register index (incl. any REX.B extension).
 */
FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0);
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
            IEM_MC_STORE_GREG_U16(iFixedReg, u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
            IEM_MC_STORE_GREG_U32(iFixedReg, u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
            IEM_MC_STORE_GREG_U64(iFixedReg, u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7506
7507
7508/**
7509 * @opcode 0xb8
7510 */
7511FNIEMOP_DEF(iemOp_eAX_Iv)
7512{
7513 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
7514 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7515}
7516
7517
7518/**
7519 * @opcode 0xb9
7520 */
7521FNIEMOP_DEF(iemOp_eCX_Iv)
7522{
7523 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
7524 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7525}
7526
7527
7528/**
7529 * @opcode 0xba
7530 */
7531FNIEMOP_DEF(iemOp_eDX_Iv)
7532{
7533 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
7534 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7535}
7536
7537
7538/**
7539 * @opcode 0xbb
7540 */
7541FNIEMOP_DEF(iemOp_eBX_Iv)
7542{
7543 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
7544 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7545}
7546
7547
7548/**
7549 * @opcode 0xbc
7550 */
7551FNIEMOP_DEF(iemOp_eSP_Iv)
7552{
7553 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
7554 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7555}
7556
7557
7558/**
7559 * @opcode 0xbd
7560 */
7561FNIEMOP_DEF(iemOp_eBP_Iv)
7562{
7563 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
7564 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7565}
7566
7567
7568/**
7569 * @opcode 0xbe
7570 */
7571FNIEMOP_DEF(iemOp_eSI_Iv)
7572{
7573 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
7574 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7575}
7576
7577
7578/**
7579 * @opcode 0xbf
7580 */
7581FNIEMOP_DEF(iemOp_eDI_Iv)
7582{
7583 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
7584 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7585}
7586
7587
/**
 * @opcode 0xc0
 *
 * Group 2 (186+): rol/ror/rcl/rcr/shl/shr/sar Eb,Ib - byte shift/rotate by an
 * immediate count, selected by ModRM.reg.  /6 is an invalid encoding.
 * OF and AF are left undefined by these workers (count dependent on hardware).
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Select the worker table (and stats mnemonic) from ModRM.reg. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_BEGIN(3, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory - note the immediate is fetched after the effective address. */
        IEM_MC_BEGIN(3, 3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7649
7650
/**
 * @opcode 0xc1
 *
 * Group 2 (186+): rol/ror/rcl/rcr/shl/shr/sar Ev,Ib - word/dword/qword
 * shift/rotate by an immediate count, selected by ModRM.reg.  /6 is an
 * invalid encoding.  OF and AF are left undefined by these workers.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Select the worker table (and stats mnemonic) from ModRM.reg. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory - note the immediate is fetched after the effective address. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7797
7798
/**
 * @opcode 0xc2
 *
 * RETN Iw - near return, additionally popping Iw bytes of arguments off the
 * stack.  Defers to a C implementation per effective operand size.
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_16, u16Imm);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_32, u16Imm);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_64, u16Imm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7819
7820
/**
 * @opcode 0xc3
 *
 * RETN - plain near return.  Defers to a C implementation per effective
 * operand size.
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_16);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_32);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_64);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7840
7841
7842/**
7843 * @opcode 0xc4
7844 */
7845FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
7846{
7847 /* The LDS instruction is invalid 64-bit mode. In legacy and
7848 compatability mode it is invalid with MOD=3.
7849 The use as a VEX prefix is made possible by assigning the inverted
7850 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
7851 outside of 64-bit mode. VEX is not available in real or v86 mode. */
7852 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7853 if ( IEM_IS_64BIT_CODE(pVCpu)
7854 || IEM_IS_MODRM_REG_MODE(bRm) )
7855 {
7856 IEMOP_MNEMONIC(vex3_prefix, "vex3");
7857 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
7858 {
7859 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
7860 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
7861 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
7862 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
7863 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
7864 if ((bVex2 & 0x80 /* VEX.W */) && IEM_IS_64BIT_CODE(pVCpu))
7865 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
7866 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
7867 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
7868 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
7869 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
7870 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
7871 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
7872
7873 switch (bRm & 0x1f)
7874 {
7875 case 1: /* 0x0f lead opcode byte. */
7876#ifdef IEM_WITH_VEX
7877 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7878#else
7879 IEMOP_BITCH_ABOUT_STUB();
7880 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7881#endif
7882
7883 case 2: /* 0x0f 0x38 lead opcode bytes. */
7884#ifdef IEM_WITH_VEX
7885 return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7886#else
7887 IEMOP_BITCH_ABOUT_STUB();
7888 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7889#endif
7890
7891 case 3: /* 0x0f 0x3a lead opcode bytes. */
7892#ifdef IEM_WITH_VEX
7893 return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7894#else
7895 IEMOP_BITCH_ABOUT_STUB();
7896 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7897#endif
7898
7899 default:
7900 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
7901 IEMOP_RAISE_INVALID_OPCODE_RET();
7902 }
7903 }
7904 Log(("VEX3: VEX support disabled!\n"));
7905 IEMOP_RAISE_INVALID_OPCODE_RET();
7906 }
7907
7908 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
7909 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
7910}
7911
7912
7913/**
7914 * @opcode 0xc5
7915 */
7916FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
7917{
7918 /* The LES instruction is invalid 64-bit mode. In legacy and
7919 compatability mode it is invalid with MOD=3.
7920 The use as a VEX prefix is made possible by assigning the inverted
7921 REX.R to the top MOD bit, and the top bit in the inverted register
7922 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
7923 to accessing registers 0..7 in this VEX form. */
7924 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7925 if ( IEM_IS_64BIT_CODE(pVCpu)
7926 || IEM_IS_MODRM_REG_MODE(bRm))
7927 {
7928 IEMOP_MNEMONIC(vex2_prefix, "vex2");
7929 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
7930 {
7931 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
7932 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
7933 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
7934 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
7935 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
7936 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
7937 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
7938 pVCpu->iem.s.idxPrefix = bRm & 0x3;
7939
7940#ifdef IEM_WITH_VEX
7941 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7942#else
7943 IEMOP_BITCH_ABOUT_STUB();
7944 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7945#endif
7946 }
7947
7948 /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
7949 Log(("VEX2: VEX support disabled!\n"));
7950 IEMOP_RAISE_INVALID_OPCODE_RET();
7951 }
7952
7953 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
7954 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
7955}
7956
7957
7958/**
7959 * @opcode 0xc6
7960 */
7961FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
7962{
7963 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7964 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
7965 IEMOP_RAISE_INVALID_OPCODE_RET();
7966 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
7967
7968 if (IEM_IS_MODRM_REG_MODE(bRm))
7969 {
7970 /* register access */
7971 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7972 IEM_MC_BEGIN(0, 0, 0);
7973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7974 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
7975 IEM_MC_ADVANCE_RIP_AND_FINISH();
7976 IEM_MC_END();
7977 }
7978 else
7979 {
7980 /* memory access. */
7981 IEM_MC_BEGIN(0, 1, 0);
7982 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7983 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7984 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7986 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
7987 IEM_MC_ADVANCE_RIP_AND_FINISH();
7988 IEM_MC_END();
7989 }
7990}
7991
7992
7993/**
7994 * @opcode 0xc7
7995 */
7996FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
7997{
7998 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7999 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
8000 IEMOP_RAISE_INVALID_OPCODE_RET();
8001 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
8002
8003 if (IEM_IS_MODRM_REG_MODE(bRm))
8004 {
8005 /* register access */
8006 switch (pVCpu->iem.s.enmEffOpSize)
8007 {
8008 case IEMMODE_16BIT:
8009 IEM_MC_BEGIN(0, 0, 0);
8010 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8012 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
8013 IEM_MC_ADVANCE_RIP_AND_FINISH();
8014 IEM_MC_END();
8015 break;
8016
8017 case IEMMODE_32BIT:
8018 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8019 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8021 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
8022 IEM_MC_ADVANCE_RIP_AND_FINISH();
8023 IEM_MC_END();
8024 break;
8025
8026 case IEMMODE_64BIT:
8027 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT);
8028 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8030 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
8031 IEM_MC_ADVANCE_RIP_AND_FINISH();
8032 IEM_MC_END();
8033 break;
8034
8035 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8036 }
8037 }
8038 else
8039 {
8040 /* memory access. */
8041 switch (pVCpu->iem.s.enmEffOpSize)
8042 {
8043 case IEMMODE_16BIT:
8044 IEM_MC_BEGIN(0, 1, 0);
8045 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8046 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
8047 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8048 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8049 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
8050 IEM_MC_ADVANCE_RIP_AND_FINISH();
8051 IEM_MC_END();
8052 break;
8053
8054 case IEMMODE_32BIT:
8055 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8056 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8057 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8058 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8060 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
8061 IEM_MC_ADVANCE_RIP_AND_FINISH();
8062 IEM_MC_END();
8063 break;
8064
8065 case IEMMODE_64BIT:
8066 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
8067 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8068 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8069 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8071 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
8072 IEM_MC_ADVANCE_RIP_AND_FINISH();
8073 IEM_MC_END();
8074 break;
8075
8076 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8077 }
8078 }
8079}
8080
8081
8082
8083
8084/**
8085 * @opcode 0xc8
8086 */
8087FNIEMOP_DEF(iemOp_enter_Iw_Ib)
8088{
8089 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
8090 IEMOP_HLP_MIN_186();
8091 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8092 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
8093 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
8094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8095 IEM_MC_DEFER_TO_CIMPL_3_RET(0, iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
8096}
8097
8098
8099/**
8100 * @opcode 0xc9
8101 */
8102FNIEMOP_DEF(iemOp_leave)
8103{
8104 IEMOP_MNEMONIC(leave, "leave");
8105 IEMOP_HLP_MIN_186();
8106 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8108 IEM_MC_DEFER_TO_CIMPL_1_RET(0, iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
8109}
8110
8111
8112/**
8113 * @opcode 0xca
8114 */
8115FNIEMOP_DEF(iemOp_retf_Iw)
8116{
8117 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
8118 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8120 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE,
8121 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
8122}
8123
8124
8125/**
8126 * @opcode 0xcb
8127 */
8128FNIEMOP_DEF(iemOp_retf)
8129{
8130 IEMOP_MNEMONIC(retf, "retf");
8131 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8132 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE,
8133 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
8134}
8135
8136
8137/**
8138 * @opcode 0xcc
8139 */
8140FNIEMOP_DEF(iemOp_int3)
8141{
8142 IEMOP_MNEMONIC(int3, "int3");
8143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8144 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
8145 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
8146 iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
8147}
8148
8149
8150/**
8151 * @opcode 0xcd
8152 */
8153FNIEMOP_DEF(iemOp_int_Ib)
8154{
8155 IEMOP_MNEMONIC(int_Ib, "int Ib");
8156 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
8157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8158 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
8159 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
8160 iemCImpl_int, u8Int, IEMINT_INTN);
8161}
8162
8163
8164/**
8165 * @opcode 0xce
8166 */
8167FNIEMOP_DEF(iemOp_into)
8168{
8169 IEMOP_MNEMONIC(into, "into");
8170 IEMOP_HLP_NO_64BIT();
8171 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_CONDITIONAL
8172 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
8173 iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
8174}
8175
8176
8177/**
8178 * @opcode 0xcf
8179 */
8180FNIEMOP_DEF(iemOp_iret)
8181{
8182 IEMOP_MNEMONIC(iret, "iret");
8183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8184 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
8185 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
8186 iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
8187}
8188
8189
8190/**
8191 * @opcode 0xd0
8192 */
8193FNIEMOP_DEF(iemOp_Grp2_Eb_1)
8194{
8195 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8196 PCIEMOPSHIFTSIZES pImpl;
8197 switch (IEM_GET_MODRM_REG_8(bRm))
8198 {
8199 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
8200 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
8201 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
8202 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
8203 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
8204 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
8205 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
8206 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8207 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
8208 }
8209 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8210
8211 if (IEM_IS_MODRM_REG_MODE(bRm))
8212 {
8213 /* register */
8214 IEM_MC_BEGIN(3, 0, 0);
8215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8216 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8217 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
8218 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8219 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8220 IEM_MC_REF_EFLAGS(pEFlags);
8221 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8222 IEM_MC_ADVANCE_RIP_AND_FINISH();
8223 IEM_MC_END();
8224 }
8225 else
8226 {
8227 /* memory */
8228 IEM_MC_BEGIN(3, 3, 0);
8229 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8230 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
8231 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8232 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8233 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8234
8235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8237 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8238 IEM_MC_FETCH_EFLAGS(EFlags);
8239 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8240
8241 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
8242 IEM_MC_COMMIT_EFLAGS(EFlags);
8243 IEM_MC_ADVANCE_RIP_AND_FINISH();
8244 IEM_MC_END();
8245 }
8246}
8247
8248
8249
8250/**
8251 * @opcode 0xd1
8252 */
8253FNIEMOP_DEF(iemOp_Grp2_Ev_1)
8254{
8255 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8256 PCIEMOPSHIFTSIZES pImpl;
8257 switch (IEM_GET_MODRM_REG_8(bRm))
8258 {
8259 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
8260 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
8261 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
8262 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
8263 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
8264 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
8265 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
8266 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8267 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
8268 }
8269 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8270
8271 if (IEM_IS_MODRM_REG_MODE(bRm))
8272 {
8273 /* register */
8274 switch (pVCpu->iem.s.enmEffOpSize)
8275 {
8276 case IEMMODE_16BIT:
8277 IEM_MC_BEGIN(3, 0, 0);
8278 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8279 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8280 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8281 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8282 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8283 IEM_MC_REF_EFLAGS(pEFlags);
8284 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8285 IEM_MC_ADVANCE_RIP_AND_FINISH();
8286 IEM_MC_END();
8287 break;
8288
8289 case IEMMODE_32BIT:
8290 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386);
8291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8292 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8293 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8294 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8295 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8296 IEM_MC_REF_EFLAGS(pEFlags);
8297 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8298 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8299 IEM_MC_ADVANCE_RIP_AND_FINISH();
8300 IEM_MC_END();
8301 break;
8302
8303 case IEMMODE_64BIT:
8304 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT);
8305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8306 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8307 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8308 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8309 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8310 IEM_MC_REF_EFLAGS(pEFlags);
8311 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8312 IEM_MC_ADVANCE_RIP_AND_FINISH();
8313 IEM_MC_END();
8314 break;
8315
8316 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8317 }
8318 }
8319 else
8320 {
8321 /* memory */
8322 switch (pVCpu->iem.s.enmEffOpSize)
8323 {
8324 case IEMMODE_16BIT:
8325 IEM_MC_BEGIN(3, 3, 0);
8326 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8327 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8328 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8329 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8330 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8331
8332 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8334 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8335 IEM_MC_FETCH_EFLAGS(EFlags);
8336 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8337
8338 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
8339 IEM_MC_COMMIT_EFLAGS(EFlags);
8340 IEM_MC_ADVANCE_RIP_AND_FINISH();
8341 IEM_MC_END();
8342 break;
8343
8344 case IEMMODE_32BIT:
8345 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386);
8346 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8347 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8348 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8349 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8350 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8351
8352 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8354 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8355 IEM_MC_FETCH_EFLAGS(EFlags);
8356 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8357
8358 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
8359 IEM_MC_COMMIT_EFLAGS(EFlags);
8360 IEM_MC_ADVANCE_RIP_AND_FINISH();
8361 IEM_MC_END();
8362 break;
8363
8364 case IEMMODE_64BIT:
8365 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT);
8366 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8367 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8368 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8369 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8370 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8371
8372 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8374 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8375 IEM_MC_FETCH_EFLAGS(EFlags);
8376 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8377
8378 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
8379 IEM_MC_COMMIT_EFLAGS(EFlags);
8380 IEM_MC_ADVANCE_RIP_AND_FINISH();
8381 IEM_MC_END();
8382 break;
8383
8384 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8385 }
8386 }
8387}
8388
8389
8390/**
8391 * @opcode 0xd2
8392 */
8393FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
8394{
8395 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8396 PCIEMOPSHIFTSIZES pImpl;
8397 switch (IEM_GET_MODRM_REG_8(bRm))
8398 {
8399 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
8400 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
8401 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
8402 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
8403 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
8404 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
8405 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
8406 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8407 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
8408 }
8409 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8410
8411 if (IEM_IS_MODRM_REG_MODE(bRm))
8412 {
8413 /* register */
8414 IEM_MC_BEGIN(3, 0, 0);
8415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8416 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8417 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8418 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8419 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8420 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8421 IEM_MC_REF_EFLAGS(pEFlags);
8422 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8423 IEM_MC_ADVANCE_RIP_AND_FINISH();
8424 IEM_MC_END();
8425 }
8426 else
8427 {
8428 /* memory */
8429 IEM_MC_BEGIN(3, 3, 0);
8430 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8431 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8432 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8433 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8434 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8435
8436 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8437 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8438 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8439 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8440 IEM_MC_FETCH_EFLAGS(EFlags);
8441 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8442
8443 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
8444 IEM_MC_COMMIT_EFLAGS(EFlags);
8445 IEM_MC_ADVANCE_RIP_AND_FINISH();
8446 IEM_MC_END();
8447 }
8448}
8449
8450
8451/**
8452 * @opcode 0xd3
8453 */
8454FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
8455{
8456 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8457 PCIEMOPSHIFTSIZES pImpl;
8458 switch (IEM_GET_MODRM_REG_8(bRm))
8459 {
8460 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
8461 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
8462 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
8463 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
8464 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
8465 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
8466 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
8467 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8468 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8469 }
8470 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8471
8472 if (IEM_IS_MODRM_REG_MODE(bRm))
8473 {
8474 /* register */
8475 switch (pVCpu->iem.s.enmEffOpSize)
8476 {
8477 case IEMMODE_16BIT:
8478 IEM_MC_BEGIN(3, 0, 0);
8479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8480 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8481 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8482 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8483 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8484 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8485 IEM_MC_REF_EFLAGS(pEFlags);
8486 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8487 IEM_MC_ADVANCE_RIP_AND_FINISH();
8488 IEM_MC_END();
8489 break;
8490
8491 case IEMMODE_32BIT:
8492 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386);
8493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8494 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8495 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8496 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8497 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8498 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8499 IEM_MC_REF_EFLAGS(pEFlags);
8500 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8501 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8502 IEM_MC_ADVANCE_RIP_AND_FINISH();
8503 IEM_MC_END();
8504 break;
8505
8506 case IEMMODE_64BIT:
8507 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT);
8508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8509 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8510 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8511 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8512 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8513 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8514 IEM_MC_REF_EFLAGS(pEFlags);
8515 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8516 IEM_MC_ADVANCE_RIP_AND_FINISH();
8517 IEM_MC_END();
8518 break;
8519
8520 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8521 }
8522 }
8523 else
8524 {
8525 /* memory */
8526 switch (pVCpu->iem.s.enmEffOpSize)
8527 {
8528 case IEMMODE_16BIT:
8529 IEM_MC_BEGIN(3, 3, 0);
8530 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8531 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8532 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8533 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8534 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8535
8536 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8537 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8538 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8539 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8540 IEM_MC_FETCH_EFLAGS(EFlags);
8541 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8542
8543 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
8544 IEM_MC_COMMIT_EFLAGS(EFlags);
8545 IEM_MC_ADVANCE_RIP_AND_FINISH();
8546 IEM_MC_END();
8547 break;
8548
8549 case IEMMODE_32BIT:
8550 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386);
8551 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8552 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8553 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8554 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8555 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8556
8557 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8559 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8560 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8561 IEM_MC_FETCH_EFLAGS(EFlags);
8562 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8563
8564 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
8565 IEM_MC_COMMIT_EFLAGS(EFlags);
8566 IEM_MC_ADVANCE_RIP_AND_FINISH();
8567 IEM_MC_END();
8568 break;
8569
8570 case IEMMODE_64BIT:
8571 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT);
8572 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8573 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8574 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8575 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8576 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8577
8578 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8580 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8581 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8582 IEM_MC_FETCH_EFLAGS(EFlags);
8583 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8584
8585 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
8586 IEM_MC_COMMIT_EFLAGS(EFlags);
8587 IEM_MC_ADVANCE_RIP_AND_FINISH();
8588 IEM_MC_END();
8589 break;
8590
8591 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8592 }
8593 }
8594}
8595
8596/**
8597 * @opcode 0xd4
8598 */
8599FNIEMOP_DEF(iemOp_aam_Ib)
8600{
8601 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
8602 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
8603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8604 IEMOP_HLP_NO_64BIT();
8605 if (!bImm)
8606 IEMOP_RAISE_DIVIDE_ERROR_RET();
8607 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aam, bImm);
8608}
8609
8610
8611/**
8612 * @opcode 0xd5
8613 */
8614FNIEMOP_DEF(iemOp_aad_Ib)
8615{
8616 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
8617 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
8618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8619 IEMOP_HLP_NO_64BIT();
8620 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aad, bImm);
8621}
8622
8623
8624/**
8625 * @opcode 0xd6
8626 */
8627FNIEMOP_DEF(iemOp_salc)
8628{
8629 IEMOP_MNEMONIC(salc, "salc");
8630 IEMOP_HLP_NO_64BIT();
8631
8632 IEM_MC_BEGIN(0, 0, 0);
8633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8634 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8635 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
8636 } IEM_MC_ELSE() {
8637 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
8638 } IEM_MC_ENDIF();
8639 IEM_MC_ADVANCE_RIP_AND_FINISH();
8640 IEM_MC_END();
8641}
8642
8643
8644/**
8645 * @opcode 0xd7
8646 */
8647FNIEMOP_DEF(iemOp_xlat)
8648{
8649 IEMOP_MNEMONIC(xlat, "xlat");
8650 switch (pVCpu->iem.s.enmEffAddrMode)
8651 {
8652 case IEMMODE_16BIT:
8653 IEM_MC_BEGIN(2, 0, 0);
8654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8655 IEM_MC_LOCAL(uint8_t, u8Tmp);
8656 IEM_MC_LOCAL(uint16_t, u16Addr);
8657 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
8658 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
8659 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
8660 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
8661 IEM_MC_ADVANCE_RIP_AND_FINISH();
8662 IEM_MC_END();
8663 break;
8664
8665 case IEMMODE_32BIT:
8666 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386);
8667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8668 IEM_MC_LOCAL(uint8_t, u8Tmp);
8669 IEM_MC_LOCAL(uint32_t, u32Addr);
8670 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
8671 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
8672 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
8673 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
8674 IEM_MC_ADVANCE_RIP_AND_FINISH();
8675 IEM_MC_END();
8676 break;
8677
8678 case IEMMODE_64BIT:
8679 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT);
8680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8681 IEM_MC_LOCAL(uint8_t, u8Tmp);
8682 IEM_MC_LOCAL(uint64_t, u64Addr);
8683 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
8684 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
8685 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
8686 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
8687 IEM_MC_ADVANCE_RIP_AND_FINISH();
8688 IEM_MC_END();
8689 break;
8690
8691 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8692 }
8693}
8694
8695
8696/**
8697 * Common worker for FPU instructions working on ST0 and STn, and storing the
8698 * result in ST0.
8699 *
8700 * @param bRm Mod R/M byte.
8701 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8702 */
8703FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
8704{
8705 IEM_MC_BEGIN(3, 1, 0);
8706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8707 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8708 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8709 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8710 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8711
8712 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8713 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8714 IEM_MC_PREPARE_FPU_USAGE();
8715 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8716 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
8717 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
8718 } IEM_MC_ELSE() {
8719 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
8720 } IEM_MC_ENDIF();
8721 IEM_MC_ADVANCE_RIP_AND_FINISH();
8722
8723 IEM_MC_END();
8724}
8725
8726
8727/**
8728 * Common worker for FPU instructions working on ST0 and STn, and only affecting
8729 * flags.
8730 *
8731 * @param bRm Mod R/M byte.
8732 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8733 */
8734FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8735{
8736 IEM_MC_BEGIN(3, 1, 0);
8737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8738 IEM_MC_LOCAL(uint16_t, u16Fsw);
8739 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8740 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8741 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8742
8743 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8744 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8745 IEM_MC_PREPARE_FPU_USAGE();
8746 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8747 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8748 IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
8749 } IEM_MC_ELSE() {
8750 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
8751 } IEM_MC_ENDIF();
8752 IEM_MC_ADVANCE_RIP_AND_FINISH();
8753
8754 IEM_MC_END();
8755}
8756
8757
8758/**
8759 * Common worker for FPU instructions working on ST0 and STn, only affecting
8760 * flags, and popping when done.
8761 *
8762 * @param bRm Mod R/M byte.
8763 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8764 */
8765FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8766{
8767 IEM_MC_BEGIN(3, 1, 0);
8768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8769 IEM_MC_LOCAL(uint16_t, u16Fsw);
8770 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8771 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8772 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8773
8774 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8775 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8776 IEM_MC_PREPARE_FPU_USAGE();
8777 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8778 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8779 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
8780 } IEM_MC_ELSE() {
8781 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
8782 } IEM_MC_ENDIF();
8783 IEM_MC_ADVANCE_RIP_AND_FINISH();
8784
8785 IEM_MC_END();
8786}
8787
8788
/** Opcode 0xd8 11/0. FADD ST0,STi: ST0 += STi. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xd8 11/1. FMUL ST0,STi: ST0 *= STi. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xd8 11/2. FCOM ST0,STi: compare, sets C0-C3 only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/3. FCOMP ST0,STi: same worker as FCOM, but pops afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/4. FSUB ST0,STi: ST0 -= STi. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xd8 11/5. FSUBR ST0,STi: ST0 = STi - ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xd8 11/6. FDIV ST0,STi: ST0 /= STi. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}


/** Opcode 0xd8 11/7. FDIVR ST0,STi: ST0 = STi / ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
8851
8852
8853/**
8854 * Common worker for FPU instructions working on ST0 and an m32r, and storing
8855 * the result in ST0.
8856 *
8857 * @param bRm Mod R/M byte.
8858 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8859 */
8860FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
8861{
8862 IEM_MC_BEGIN(3, 3, 0);
8863 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8864 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8865 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
8866 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8867 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8868 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
8869
8870 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8872
8873 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8874 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8875 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8876
8877 IEM_MC_PREPARE_FPU_USAGE();
8878 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
8879 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
8880 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
8881 } IEM_MC_ELSE() {
8882 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
8883 } IEM_MC_ENDIF();
8884 IEM_MC_ADVANCE_RIP_AND_FINISH();
8885
8886 IEM_MC_END();
8887}
8888
8889
/** Opcode 0xd8 !11/0. FADD ST0,m32r: ST0 += [mem32]. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1. FMUL ST0,m32r: ST0 *= [mem32]. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
8904
8905
/** Opcode 0xd8 !11/2. FCOM ST0,m32r: compares ST0 with [mem32]; FSW only. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* The *_WITH_MEM_OP variant also records FPUDP/FPUDS for the data operand. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8937
8938
/** Opcode 0xd8 !11/3. FCOMP ST0,m32r: as FCOM m32r, but pops ST0 when done. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8970
8971
/** Opcode 0xd8 !11/4. FSUB ST0,m32r: ST0 -= [mem32]. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5. FSUBR ST0,m32r: ST0 = [mem32] - ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6. FDIV ST0,m32r: ST0 /= [mem32]. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7. FDIVR ST0,m32r: ST0 = [mem32] / ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
9002
9003
9004/**
9005 * @opcode 0xd8
9006 */
9007FNIEMOP_DEF(iemOp_EscF0)
9008{
9009 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9010 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
9011
9012 if (IEM_IS_MODRM_REG_MODE(bRm))
9013 {
9014 switch (IEM_GET_MODRM_REG_8(bRm))
9015 {
9016 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
9017 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
9018 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
9019 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
9020 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
9021 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
9022 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
9023 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
9024 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9025 }
9026 }
9027 else
9028 {
9029 switch (IEM_GET_MODRM_REG_8(bRm))
9030 {
9031 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
9032 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
9033 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
9034 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
9035 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
9036 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
9037 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
9038 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
9039 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9040 }
9041 }
9042}
9043
9044
/** Opcode 0xd9 /0 mem32real
 * Loads a 32-bit real from memory, converts it to 80-bit and pushes it.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 relative to TOP is the slot a push lands in; it must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9075
9076
/** Opcode 0xd9 !11/2 mem32real
 * Stores ST0 to memory as a 32-bit real; ST0 is not popped. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with the invalid-operation exception masked, a
           negative QNaN is written instead of the (missing) register value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9110
9111
/** Opcode 0xd9 !11/3
 * Stores ST0 to memory as a 32-bit real and pops the stack. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Masked underflow writes a negative QNaN; the pop still happens below. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9145
9146
/** Opcode 0xd9 !11/4
 * Loads the FPU environment (14 or 28 bytes, depending on operand size) from
 * memory; the heavy lifting is done in iemCImpl_fldenv. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
9163
9164
9165/** Opcode 0xd9 !11/5 */
9166FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
9167{
9168 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
9169 IEM_MC_BEGIN(1, 1, 0);
9170 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9171 IEM_MC_ARG(uint16_t, u16Fsw, 0);
9172 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9174 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9175 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9176 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9177 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, iemCImpl_fldcw, u16Fsw);
9178 IEM_MC_END();
9179}
9180
9181
/** Opcode 0xd9 !11/6
 * Stores the FPU environment (14 or 28 bytes, depending on operand size) to
 * memory via iemCImpl_fnstenv. No pending-exception check (the FN* form). */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
9198
9199
/** Opcode 0xd9 !11/7
 * Stores the current FPU control word to a 16-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9216
9217
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP - does nothing except update the FPU opcode/instruction pointer. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9233
9234
/** Opcode 0xd9 11/0 stN
 * FLD STi - pushes a copy of STi onto the stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* The source register is read before the push adjusts TOP. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9259
9260
/** Opcode 0xd9 11/3 stN
 * FXCH STi - exchanges ST0 and STi; underflow is delegated to a C worker. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(2, 3, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        /* Swap: old STi value goes into ST0 (with C1 set), old ST0 into STi. */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9289
9290
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP STi - copies ST0 to STi, then pops the stack. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        /* Destination is ST0 itself: no copy needed, just pop (or underflow). */
        IEM_MC_BEGIN(0, 1, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* General case: store ST0 into STi, then pop. */
        IEM_MC_BEGIN(0, 2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9337
9338
9339/**
9340 * Common worker for FPU instructions working on ST0 and replaces it with the
9341 * result, i.e. unary operators.
9342 *
9343 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9344 */
9345FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
9346{
9347 IEM_MC_BEGIN(2, 1, 0);
9348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9349 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9350 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9351 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
9352
9353 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9354 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9355 IEM_MC_PREPARE_FPU_USAGE();
9356 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
9357 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
9358 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9359 } IEM_MC_ELSE() {
9360 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9361 } IEM_MC_ENDIF();
9362 IEM_MC_ADVANCE_RIP_AND_FINISH();
9363
9364 IEM_MC_END();
9365}
9366
9367
/** Opcode 0xd9 0xe0. FCHS - negates the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1. FABS - clears the sign of ST0. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
9382
9383
/** Opcode 0xd9 0xe4. FTST - compares ST0 against +0.0, only updates FSW. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9407
9408
/** Opcode 0xd9 0xe5. FXAM - classifies ST0 into C0-C3.
 * Note that unlike most x87 helpers here, the register is referenced without
 * an empty-check (IEM_MC_REF_FPUREG) since "empty" is itself a valid FXAM
 * classification result. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9429
9430
9431/**
9432 * Common worker for FPU instructions pushing a constant onto the FPU stack.
9433 *
9434 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9435 */
9436FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
9437{
9438 IEM_MC_BEGIN(1, 1, 0);
9439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9440 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9441 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9442
9443 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9444 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9445 IEM_MC_PREPARE_FPU_USAGE();
9446 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
9447 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
9448 IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
9449 } IEM_MC_ELSE() {
9450 IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
9451 } IEM_MC_ENDIF();
9452 IEM_MC_ADVANCE_RIP_AND_FINISH();
9453
9454 IEM_MC_END();
9455}
9456
9457
/** Opcode 0xd9 0xe8. FLD1 - pushes +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9. FLDL2T - pushes log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea. FLDL2E - pushes log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb. FLDPI - pushes pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec. FLDLG2 - pushes log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed. FLDLN2 - pushes ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee. FLDZ - pushes +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
9510
9511
/** Opcode 0xd9 0xf0.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
9525
9526
9527/**
9528 * Common worker for FPU instructions working on STn and ST0, storing the result
9529 * in STn, and popping the stack unless IE, DE or ZE was raised.
9530 *
9531 * @param bRm Mod R/M byte.
9532 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9533 */
9534FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
9535{
9536 IEM_MC_BEGIN(3, 1, 0);
9537 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9538 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9539 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9540 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9541 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9542
9543 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9544 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9545
9546 IEM_MC_PREPARE_FPU_USAGE();
9547 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
9548 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
9549 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
9550 } IEM_MC_ELSE() {
9551 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
9552 } IEM_MC_ENDIF();
9553 IEM_MC_ADVANCE_RIP_AND_FINISH();
9554
9555 IEM_MC_END();
9556}
9557
9558
/** Opcode 0xd9 0xf1. FYL2X - ST1 = ST1 * log2(ST0), then pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
9565
9566
9567/**
9568 * Common worker for FPU instructions working on ST0 and having two outputs, one
9569 * replacing ST0 and one pushed onto the stack.
9570 *
9571 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9572 */
9573FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
9574{
9575 IEM_MC_BEGIN(2, 1, 0);
9576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9577 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
9578 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
9579 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
9580
9581 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9582 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9583 IEM_MC_PREPARE_FPU_USAGE();
9584 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
9585 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
9586 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
9587 } IEM_MC_ELSE() {
9588 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
9589 } IEM_MC_ENDIF();
9590 IEM_MC_ADVANCE_RIP_AND_FINISH();
9591
9592 IEM_MC_END();
9593}
9594
9595
/** Opcode 0xd9 0xf2. FPTAN - ST0 = tan(ST0), then pushes 1.0. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3. FPATAN - ST1 = atan(ST1/ST0), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4. FXTRACT - splits ST0 into exponent (ST0) and pushed significand. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5. FPREM1 - IEEE partial remainder of ST0 by ST1. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
9626
9627
/** Opcode 0xd9 0xf6. FDECSTP - decrements the TOP-of-stack pointer. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9648
9649
/** Opcode 0xd9 0xf7. FINCSTP - increments the TOP-of-stack pointer. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9670
9671
9672/** Opcode 0xd9 0xf8. */
9673FNIEMOP_DEF(iemOp_fprem)
9674{
9675 IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
9676 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
9677}
9678
9679
9680/** Opcode 0xd9 0xf9. */
9681FNIEMOP_DEF(iemOp_fyl2xp1)
9682{
9683 IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
9684 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
9685}
9686
9687
9688/** Opcode 0xd9 0xfa. */
9689FNIEMOP_DEF(iemOp_fsqrt)
9690{
9691 IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
9692 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
9693}
9694
9695
/** Opcode 0xd9 0xfb - fsincos: replaces ST(0) with sin(ST(0)) and pushes
 *  cos(old ST(0)) onto the stack (helper handles the replace+push). */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
9702
9703
/** Opcode 0xd9 0xfc - frndint: rounds ST(0) to an integral value using the
 *  current FCW rounding mode. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
9710
9711
/** Opcode 0xd9 0xfd - fscale: ST(0) := ST(0) * 2^trunc(ST(1)). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
9718
9719
/** Opcode 0xd9 0xfe - fsin: ST(0) := sin(ST(0)). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
9726
9727
/** Opcode 0xd9 0xff - fcos: ST(0) := cos(ST(0)). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
9734
9735
/** Used by iemOp_EscF1 to dispatch the register-form 0xd9 instructions with
 *  ModRM bytes 0xe0 thru 0xff.  Indexed by (bRm - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
9772
9773
9774/**
9775 * @opcode 0xd9
9776 */
9777FNIEMOP_DEF(iemOp_EscF1)
9778{
9779 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9780 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
9781
9782 if (IEM_IS_MODRM_REG_MODE(bRm))
9783 {
9784 switch (IEM_GET_MODRM_REG_8(bRm))
9785 {
9786 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
9787 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
9788 case 2:
9789 if (bRm == 0xd0)
9790 return FNIEMOP_CALL(iemOp_fnop);
9791 IEMOP_RAISE_INVALID_OPCODE_RET();
9792 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
9793 case 4:
9794 case 5:
9795 case 6:
9796 case 7:
9797 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
9798 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
9799 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9800 }
9801 }
9802 else
9803 {
9804 switch (IEM_GET_MODRM_REG_8(bRm))
9805 {
9806 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
9807 case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
9808 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
9809 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
9810 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
9811 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
9812 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
9813 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
9814 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9815 }
9816 }
9817}
9818
9819
/** Opcode 0xda 11/0 - fcmovb: copy ST(i) to ST(0) when EFLAGS.CF is set.
 *  Raises FPU stack underflow if either ST(0) or ST(i) is empty. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode); /* FOP/FPUIP updated even if no move */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9844
9845
/** Opcode 0xda 11/1 - fcmove: copy ST(i) to ST(0) when EFLAGS.ZF is set.
 *  Raises FPU stack underflow if either ST(0) or ST(i) is empty. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode); /* FOP/FPUIP updated even if no move */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9870
9871
/** Opcode 0xda 11/2 - fcmovbe: copy ST(i) to ST(0) when EFLAGS.CF or
 *  EFLAGS.ZF is set.  Raises FPU stack underflow on empty registers. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode); /* FOP/FPUIP updated even if no move */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9896
9897
/** Opcode 0xda 11/3 - fcmovu: copy ST(i) to ST(0) when EFLAGS.PF is set
 *  (i.e. the preceding compare was unordered).  Raises FPU stack underflow
 *  on empty registers. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode); /* FOP/FPUIP updated even if no move */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9922
9923
9924/**
9925 * Common worker for FPU instructions working on ST0 and ST1, only affecting
9926 * flags, and popping twice when done.
9927 *
9928 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9929 */
9930FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
9931{
9932 IEM_MC_BEGIN(3, 1, 0);
9933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9934 IEM_MC_LOCAL(uint16_t, u16Fsw);
9935 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9936 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9937 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9938
9939 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9940 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9941
9942 IEM_MC_PREPARE_FPU_USAGE();
9943 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
9944 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
9945 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
9946 } IEM_MC_ELSE() {
9947 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
9948 } IEM_MC_ENDIF();
9949 IEM_MC_ADVANCE_RIP_AND_FINISH();
9950
9951 IEM_MC_END();
9952}
9953
9954
/** Opcode 0xda 0xe9 - fucompp: unordered compare ST(0) with ST(1), setting
 *  C0/C2/C3, then pop the stack twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
9961
9962
9963/**
9964 * Common worker for FPU instructions working on ST0 and an m32i, and storing
9965 * the result in ST0.
9966 *
9967 * @param bRm Mod R/M byte.
9968 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9969 */
9970FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
9971{
9972 IEM_MC_BEGIN(3, 3, 0);
9973 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9974 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9975 IEM_MC_LOCAL(int32_t, i32Val2);
9976 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9977 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9978 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
9979
9980 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9982
9983 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9984 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9985 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9986
9987 IEM_MC_PREPARE_FPU_USAGE();
9988 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
9989 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
9990 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9991 } IEM_MC_ELSE() {
9992 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9993 } IEM_MC_ENDIF();
9994 IEM_MC_ADVANCE_RIP_AND_FINISH();
9995
9996 IEM_MC_END();
9997}
9998
9999
/** Opcode 0xda !11/0 - fiadd: ST(0) := ST(0) + m32i. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
10006
10007
/** Opcode 0xda !11/1 - fimul: ST(0) := ST(0) * m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
10014
10015
/** Opcode 0xda !11/2 - ficom: compare ST(0) with m32i, setting C0/C2/C3.
 *  Only FSW is updated; no register is written and nothing is popped. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10047
10048
/** Opcode 0xda !11/3 - ficomp: compare ST(0) with m32i, setting C0/C2/C3,
 *  then pop ST(0).  Same as ficom except for the pop. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10080
10081
/** Opcode 0xda !11/4 - fisub: ST(0) := ST(0) - m32i. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
10088
10089
/** Opcode 0xda !11/5 - fisubr: ST(0) := m32i - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
10096
10097
/** Opcode 0xda !11/6 - fidiv: ST(0) := ST(0) / m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
10104
10105
/** Opcode 0xda !11/7 - fidivr: ST(0) := m32i / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
10112
10113
10114/**
10115 * @opcode 0xda
10116 */
10117FNIEMOP_DEF(iemOp_EscF2)
10118{
10119 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10120 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
10121 if (IEM_IS_MODRM_REG_MODE(bRm))
10122 {
10123 switch (IEM_GET_MODRM_REG_8(bRm))
10124 {
10125 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
10126 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
10127 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
10128 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
10129 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
10130 case 5:
10131 if (bRm == 0xe9)
10132 return FNIEMOP_CALL(iemOp_fucompp);
10133 IEMOP_RAISE_INVALID_OPCODE_RET();
10134 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10135 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
10136 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10137 }
10138 }
10139 else
10140 {
10141 switch (IEM_GET_MODRM_REG_8(bRm))
10142 {
10143 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
10144 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
10145 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
10146 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
10147 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
10148 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
10149 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
10150 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
10151 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10152 }
10153 }
10154}
10155
10156
/** Opcode 0xdb !11/0 - fild: convert m32i to an 80-bit real and push it.
 *  The push lands in the current ST(7) slot (which becomes ST(0) after TOP
 *  is decremented), so that register must be empty or a stack overflow is
 *  signalled. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10187
10188
/** Opcode 0xdb !11/1 - fisttp: store ST(0) to m32i using truncation
 *  (chop) regardless of the FCW rounding mode, then pop.  On stack
 *  underflow with IM masked, the integer indefinite value is stored. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw); /* commit only if no unmasked xcpt */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10222
10223
/** Opcode 0xdb !11/2 - fist: store ST(0) to m32i using the FCW rounding
 *  mode; the stack is left unchanged.  On stack underflow with IM masked,
 *  the integer indefinite value is stored. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw); /* commit only if no unmasked xcpt */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10257
10258
/** Opcode 0xdb !11/3 - fistp: store ST(0) to m32i using the FCW rounding
 *  mode, then pop.  Identical to fist except for the pop.  On stack
 *  underflow with IM masked, the integer indefinite value is stored. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw); /* commit only if no unmasked xcpt */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10292
10293
/** Opcode 0xdb !11/5 - fld m80r: load an 80-bit real from memory and push
 *  it.  The destination slot (current ST(7), new ST(0) after the push) must
 *  be empty or a stack overflow is signalled. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10324
10325
/** Opcode 0xdb !11/7 - fstp m80r: store ST(0) to an 80-bit real in memory,
 *  then pop.  On stack underflow with IM masked, a negative QNaN (real
 *  indefinite) is stored instead. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw); /* commit only if no unmasked xcpt */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10359
10360
/** Opcode 0xdb 11/0 - fcmovnb: copy ST(i) to ST(0) when EFLAGS.CF is clear.
 *  Raises FPU stack underflow if either ST(0) or ST(i) is empty. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode); /* FOP/FPUIP updated even if no move */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10385
10386
/** Opcode 0xdb 11/1 - fcmovne: copy ST(i) to ST(0) when EFLAGS.ZF is clear.
 *  Raises FPU stack underflow if either ST(0) or ST(i) is empty. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode); /* FOP/FPUIP updated even if no move */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10411
10412
/** Opcode 0xdb 11/2 - fcmovnbe: copy ST(i) to ST(0) when both EFLAGS.CF and
 *  EFLAGS.ZF are clear.  Raises FPU stack underflow on empty registers. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode); /* FOP/FPUIP updated even if no move */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10437
10438
/** Opcode 0xdb 11/3 - fcmovnu: copy ST(i) to ST(0) when EFLAGS.PF is clear
 *  (i.e. the preceding compare was not unordered).  Raises FPU stack
 *  underflow on empty registers. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEM_MC_BEGIN(0, 1, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode); /* FOP/FPUIP updated even if no move */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10463
10464
/** Opcode 0xdb 0xe0 - fneni: 8087 "enable interrupts"; a no-op on later
 *  FPUs, emulated here as such (only the CR0.TS/EM check is performed). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10475
10476
/** Opcode 0xdb 0xe1 - fndisi: 8087 "disable interrupts"; a no-op on later
 *  FPUs, emulated here as such (only the CR0.TS/EM check is performed). */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10487
10488
/** Opcode 0xdb 0xe2 - fnclex: clears the FPU exception flags (and busy bit)
 *  in FSW without checking for pending exceptions first. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10501
10502
/** Opcode 0xdb 0xe3 - fninit: reinitializes the FPU without checking for
 *  pending exceptions first (fCheckXcpts=false); deferred to C impl. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, iemCImpl_finit, false /*fCheckXcpts*/);
}
10510
10511
/** Opcode 0xdb 0xe4 - fnsetpm: 80287 "set protected mode"; ignored (no-op)
 *  on later FPUs, emulated here as such. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10522
10523
/** Opcode 0xdb 0xe5 - frstpm: 80287XL "return from protected mode"; raises
 *  \#UD here, matching newer CPUs (the disabled branch would treat it as a
 *  no-op like on the 287XL). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
10539
10540
/** Opcode 0xdb 11/5 - fucomi: unordered compare ST(0) with ST(i), setting
 *  EFLAGS (ZF/PF/CF) instead of the FPU condition codes; no pop.  Deferred
 *  to the common fcomi/fucomi C impl with the FPU opcode packed into the
 *  last argument. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fucomi_r80_by_r80,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
10549
10550
10551/** Opcode 0xdb 11/6. */
10552FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
10553{
10554 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
10555 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
10556 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
10557 false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
10558}
10559
10560
10561/**
10562 * @opcode 0xdb
10563 */
10564FNIEMOP_DEF(iemOp_EscF3)
10565{
10566 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10567 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
10568 if (IEM_IS_MODRM_REG_MODE(bRm))
10569 {
10570 switch (IEM_GET_MODRM_REG_8(bRm))
10571 {
10572 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
10573 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
10574 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
10575 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
10576 case 4:
10577 switch (bRm)
10578 {
10579 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
10580 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
10581 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
10582 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
10583 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
10584 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
10585 case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
10586 case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
10587 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10588 }
10589 break;
10590 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
10591 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
10592 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
10593 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10594 }
10595 }
10596 else
10597 {
10598 switch (IEM_GET_MODRM_REG_8(bRm))
10599 {
10600 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
10601 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
10602 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
10603 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
10604 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
10605 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
10606 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10607 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
10608 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10609 }
10610 }
10611}
10612
10613
10614/**
10615 * Common worker for FPU instructions working on STn and ST0, and storing the
10616 * result in STn unless IE, DE or ZE was raised.
10617 *
10618 * @param bRm Mod R/M byte.
10619 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10620 */
10621FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
10622{
10623 IEM_MC_BEGIN(3, 1, 0);
10624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10625 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10626 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10627 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10628 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10629
10630 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10631 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10632
10633 IEM_MC_PREPARE_FPU_USAGE();
10634 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
10635 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
10636 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10637 } IEM_MC_ELSE() {
10638 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10639 } IEM_MC_ENDIF();
10640 IEM_MC_ADVANCE_RIP_AND_FINISH();
10641
10642 IEM_MC_END();
10643}
10644
10645
/** Opcode 0xdc 11/0. FADD ST(i),ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
10652
10653
/** Opcode 0xdc 11/1. FMUL ST(i),ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
10660
10661
/** Opcode 0xdc 11/4. FSUBR ST(i),ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
10668
10669
/** Opcode 0xdc 11/5. FSUB ST(i),ST0. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
10676
10677
/** Opcode 0xdc 11/6. FDIVR ST(i),ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
10684
10685
/** Opcode 0xdc 11/7. FDIV ST(i),ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
10692
10693
10694/**
10695 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
10696 * memory operand, and storing the result in ST0.
10697 *
10698 * @param bRm Mod R/M byte.
10699 * @param pfnImpl Pointer to the instruction implementation (assembly).
10700 */
10701FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
10702{
10703 IEM_MC_BEGIN(3, 3, 0);
10704 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10705 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10706 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
10707 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10708 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
10709 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
10710
10711 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10713 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10714 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10715
10716 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10717 IEM_MC_PREPARE_FPU_USAGE();
10718 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
10719 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
10720 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10721 } IEM_MC_ELSE() {
10722 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10723 } IEM_MC_ENDIF();
10724 IEM_MC_ADVANCE_RIP_AND_FINISH();
10725
10726 IEM_MC_END();
10727}
10728
10729
/** Opcode 0xdc !11/0. FADD ST0,m64r. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
10736
10737
/** Opcode 0xdc !11/1. FMUL ST0,m64r. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
10744
10745
/** Opcode 0xdc !11/2. FCOM ST0,m64r - compare ST0 with the m64 value, only
 *  updating FSW (no stack change). */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10777
10778
/** Opcode 0xdc !11/3. FCOMP ST0,m64r - same as FCOM m64r but pops the stack
 *  afterwards (the _THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10810
10811
/** Opcode 0xdc !11/4. FSUB ST0,m64r. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
10818
10819
/** Opcode 0xdc !11/5. FSUBR ST0,m64r. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
10826
10827
/** Opcode 0xdc !11/6. FDIV ST0,m64r. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
10834
10835
/** Opcode 0xdc !11/7. FDIVR ST0,m64r. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
10842
10843
10844/**
10845 * @opcode 0xdc
10846 */
10847FNIEMOP_DEF(iemOp_EscF4)
10848{
10849 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10850 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
10851 if (IEM_IS_MODRM_REG_MODE(bRm))
10852 {
10853 switch (IEM_GET_MODRM_REG_8(bRm))
10854 {
10855 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
10856 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
10857 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
10858 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
10859 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
10860 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
10861 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
10862 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
10863 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10864 }
10865 }
10866 else
10867 {
10868 switch (IEM_GET_MODRM_REG_8(bRm))
10869 {
10870 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
10871 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
10872 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
10873 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
10874 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
10875 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
10876 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
10877 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
10878 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10879 }
10880 }
10881}
10882
10883
/** Opcode 0xdd !11/0. FLD m64r - push the m64 real onto the FPU stack,
 * signalling stack overflow if the new top register (ST7 before push) isn't
 * empty.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10914
10915
/** Opcode 0xdd !11/1. FISTTP m64i - store ST0 as a truncated 64-bit integer
 * and pop.  On an empty ST0, writes the integer indefinite value when the
 * invalid-operation exception is masked (FCW.IM). */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10949
10950
/** Opcode 0xdd !11/2. FST m64r - store ST0 to memory as a 64-bit real
 * (no pop).  On an empty ST0, writes negative QNaN when FCW.IM is set. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10984
10985
10986
10987
/** Opcode 0xdd !11/3. FSTP m64r - same as FST m64r, but pops the stack
 * afterwards (the _THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11021
11022
/** Opcode 0xdd !11/4. FRSTOR m94/108byte - restore the whole FPU state;
 * deferred to the C implementation. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
11039
11040
/** Opcode 0xdd !11/6. FNSAVE m94/108byte - save the whole FPU state and
 * reinitialize it; deferred to the C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
11057
/** Opcode 0xdd !11/7. FNSTSW m16 - store the FPU status word to memory
 * (no pending-exception check, hence the FN prefix). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2, 0);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
11081
11082
/** Opcode 0xdd 11/0. FFREE ST(i) - mark ST(i)'s tag as empty. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
       unmodified. */
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11102
11103
/** Opcode 0xdd 11/2. FST ST(i) - copy ST0 into ST(i) (no pop). */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11126
11127
/** Opcode 0xdd 11/4. FUCOM ST0,ST(i) - unordered compare, FSW only. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
11134
11135
/** Opcode 0xdd 11/5. FUCOMP ST0,ST(i) - unordered compare, FSW only, pop. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
11142
11143
11144/**
11145 * @opcode 0xdd
11146 */
11147FNIEMOP_DEF(iemOp_EscF5)
11148{
11149 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11150 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
11151 if (IEM_IS_MODRM_REG_MODE(bRm))
11152 {
11153 switch (IEM_GET_MODRM_REG_8(bRm))
11154 {
11155 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
11156 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
11157 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
11158 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
11159 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
11160 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
11161 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11162 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11163 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11164 }
11165 }
11166 else
11167 {
11168 switch (IEM_GET_MODRM_REG_8(bRm))
11169 {
11170 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
11171 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
11172 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
11173 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
11174 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
11175 case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
11176 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
11177 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
11178 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11179 }
11180 }
11181}
11182
11183
/** Opcode 0xde 11/0. FADDP ST(i),ST0 - add and pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
11190
11191
/** Opcode 0xde 11/1. FMULP ST(i),ST0 - multiply and pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
11198
11199
/** Opcode 0xde 0xd9. FCOMPP - compare ST0 with ST1 and pop both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
11206
11207
/** Opcode 0xde 11/4. FSUBRP ST(i),ST0 - reverse subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
11214
11215
/** Opcode 0xde 11/5. FSUBP ST(i),ST0 - subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
11222
11223
/** Opcode 0xde 11/6. FDIVRP ST(i),ST0 - reverse divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
11230
11231
/** Opcode 0xde 11/7. FDIVP ST(i),ST0 - divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
11238
11239
11240/**
11241 * Common worker for FPU instructions working on ST0 and an m16i, and storing
11242 * the result in ST0.
11243 *
11244 * @param bRm Mod R/M byte.
11245 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11246 */
11247FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
11248{
11249 IEM_MC_BEGIN(3, 3, 0);
11250 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11251 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11252 IEM_MC_LOCAL(int16_t, i16Val2);
11253 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11254 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11255 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
11256
11257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11259
11260 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11261 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11262 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11263
11264 IEM_MC_PREPARE_FPU_USAGE();
11265 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11266 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
11267 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
11268 } IEM_MC_ELSE() {
11269 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11270 } IEM_MC_ENDIF();
11271 IEM_MC_ADVANCE_RIP_AND_FINISH();
11272
11273 IEM_MC_END();
11274}
11275
11276
/** Opcode 0xde !11/0. FIADD ST0,m16i. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
11283
11284
/** Opcode 0xde !11/1. FIMUL ST0,m16i. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
11291
11292
/** Opcode 0xde !11/2. FICOM ST0,m16i - compare ST0 with the 16-bit integer,
 *  only updating FSW (no stack change). */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11324
11325
/** Opcode 0xde !11/3. FICOMP ST0,m16i - same as FICOM m16i but pops the
 *  stack afterwards (the _THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11357
11358
/** Opcode 0xde !11/4. FISUB ST0,m16i. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
11365
11366
/** Opcode 0xde !11/5. FISUBR ST0,m16i. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
11373
11374
/** Opcode 0xde !11/6. FIDIV ST0,m16i. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
11381
11382
/** Opcode 0xde !11/7. FIDIVR ST0,m16i. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
11389
11390
11391/**
11392 * @opcode 0xde
11393 */
11394FNIEMOP_DEF(iemOp_EscF6)
11395{
11396 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11397 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
11398 if (IEM_IS_MODRM_REG_MODE(bRm))
11399 {
11400 switch (IEM_GET_MODRM_REG_8(bRm))
11401 {
11402 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
11403 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
11404 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
11405 case 3: if (bRm == 0xd9)
11406 return FNIEMOP_CALL(iemOp_fcompp);
11407 IEMOP_RAISE_INVALID_OPCODE_RET();
11408 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
11409 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
11410 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
11411 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
11412 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11413 }
11414 }
11415 else
11416 {
11417 switch (IEM_GET_MODRM_REG_8(bRm))
11418 {
11419 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
11420 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
11421 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
11422 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
11423 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
11424 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
11425 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
11426 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
11427 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11428 }
11429 }
11430}
11431
11432
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11452
11453
/** Opcode 0xdf 0xe0. FNSTSW AX - store the FPU status word in AX. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11468
11469
11470/** Opcode 0xdf 11/5. */
11471FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
11472{
11473 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
11474 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
11475 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
11476 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
11477}
11478
11479
/** Opcode 0xdf 11/6. FCOMIP ST0,ST(i) - ordered compare into EFLAGS, pop
 * (bit 31 of the last argument signals the pop to the cImpl worker). */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11488
11489
/** Opcode 0xdf !11/0. FILD m16i - convert the 16-bit integer and push it,
 * signalling stack overflow if the new top register isn't empty. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11520
11521
/** Opcode 0xdf !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2, 0);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,                 pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable before touching the FPU stack so #PF etc.
       happen before any state is modified. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* ST(0) valid: store with truncation (fistt), commit, update FSW, pop. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: if #IA is masked (FCW.IM), write the integer
           indefinite value; record the underflow and still pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11555
11556
/** Opcode 0xdf !11/2. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2, 0);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,                 pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable before touching the FPU stack. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* ST(0) valid: store rounded per FCW.RC, commit and update FSW (no pop). */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: masked #IA writes the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11590
11591
/** Opcode 0xdf !11/3. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2, 0);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,                 pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable before touching the FPU stack. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* ST(0) valid: store rounded per FCW.RC, commit, update FSW and pop. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: masked #IA writes the integer indefinite value;
           the register stack is still popped. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11625
11626
/** Opcode 0xdf !11/4. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3, 0);
    IEM_MC_LOCAL(RTGCPTR,            GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,       FpuRes);
    IEM_MC_LOCAL(RTPBCD80U,          d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U,   pd80Val, d80Val, 1);

    /* Decode the effective address before raising any exceptions. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {   /* ST(7) free => there is room to push */
        /* Convert the packed BCD value to r80 and push it. */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack full: record a push overflow instead. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11657
11658
/** Opcode 0xdf !11/5. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3, 0);
    IEM_MC_LOCAL(RTGCPTR,            GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,       FpuRes);
    IEM_MC_LOCAL(int64_t,            i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,  FpuRes,  0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    /* Decode the effective address before raising any exceptions. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {   /* ST(7) free => there is room to push */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack full: record a push overflow instead. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11689
11690
/** Opcode 0xdf !11/6. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 2, 0);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTPBCD80U,                pd80Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the 10-byte BCD destination writable before touching the FPU stack.
       NOTE(review): cbAlign is 7 here, unlike the power-of-two alignments used
       elsewhere - presumably deliberate for the 80-bit store; confirm against
       IEM_MC_MEM_MAP_EX semantics. */
    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* ST(0) valid: convert to packed BCD, commit, update FSW and pop. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: masked #IA writes the BCD indefinite encoding;
           the register stack is still popped. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11724
11725
/** Opcode 0xdf !11/7. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2, 0);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,                 pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable before touching the FPU stack. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* ST(0) valid: store rounded per FCW.RC, commit, update FSW and pop. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: masked #IA writes the integer indefinite value;
           the register stack is still popped. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11759
11760
/**
 * @opcode 0xdf
 *
 * ESC 7 dispatcher: splits on register vs. memory form and the /reg field.
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (low 3 bits of the escape byte + modrm) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0)    /* only mod=11 /4 rm=000 (0xe0) is FNSTSW AX */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11801
11802
/**
 * @opcode 0xe0
 *
 * LOOPNE/LOOPNZ: decrement (r/e)CX per the effective address size and jump
 * if the counter is non-zero AND ZF is clear.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address size prefix selects which counter register is used. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11853
11854
/**
 * @opcode 0xe1
 *
 * LOOPE/LOOPZ: decrement (r/e)CX per the effective address size and jump
 * if the counter is non-zero AND ZF is set.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address size prefix selects which counter register is used. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11905
11906
/**
 * @opcode 0xe2
 *
 * LOOP: decrement (r/e)CX per the effective address size and jump while the
 * counter is non-zero.  Contains a logging-only shortcut for the common
 * LOOP $-2 busy-wait pattern.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */

    /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
     * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs.  Shortcutting
     * the loop causes guest crashes, but when logging it's nice to skip a few million
     * lines of useless output. */
#if defined(LOG_ENABLED)
    /* Self-branch (target == instruction start) with verbose logging enabled:
       zero the counter in one go instead of iterating. */
    if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
#endif

    /* Regular path: one decrement + conditional branch per execution. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11997
11998
/**
 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ: jump if the counter selected by the effective address
 * size is zero.  Note that no decrement is performed, unlike LOOP.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Branch taken on CX == 0, hence the inverted if/else bodies. */
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12046
12047
/** Opcode 0xe4 - IN AL, imm8: read one byte from the immediate port into AL. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* 0x80 flags the port as coming from an immediate (for VM-exit decoding). */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12057
12058
/** Opcode 0xe5 - IN eAX, imm8: read a word/dword (per operand size) from the
 *  immediate port into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12069
12070
/** Opcode 0xe6 - OUT imm8, AL: write AL to the immediate port. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* 0x80 flags the port as coming from an immediate (for VM-exit decoding). */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12080
12081
/** Opcode 0xe7 - OUT imm8, eAX: write AX/EAX (per operand size) to the
 *  immediate port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12092
12093
/**
 * @opcode 0xe8
 *
 * CALL rel16/rel32: near relative call, deferred to the C implementation
 * matching the effective operand size.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* In 64-bit mode the displacement is a sign-extended 32-bit value. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12124
12125
/**
 * @opcode 0xe9
 *
 * JMP rel16/rel32: near relative jump.  The 32-bit and 64-bit cases share
 * one body since both consume a 32-bit displacement.
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0);
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12155
12156
/**
 * @opcode 0xea
 *
 * JMP ptr16:16/ptr16:32: direct far jump.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);  /* 16-bit offset, zero extended */
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* May switch CPU mode (e.g. into/out of protected mode), hence F_MODE. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                                iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
}
12177
12178
/**
 * @opcode 0xeb
 *
 * JMP rel8: short relative jump.
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    IEM_MC_END();
}
12193
12194
/** Opcode 0xec - IN AL, DX: read one byte from the port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12203
12204
/** Opcode 0xed - IN eAX, DX: read a word/dword (per operand size) from the
 *  port in DX into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12214
12215
/** Opcode 0xee - OUT DX, AL: write AL to the port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12224
12225
/** Opcode 0xef - OUT DX, eAX: write AX/EAX (per operand size) to the port
 *  in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12235
12236
/**
 * @opcode 0xf0
 *
 * LOCK prefix: record the prefix (unless the execution mode disregards it)
 * and continue decoding with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12249
12250
/**
 * @opcode 0xf1
 *
 * INT1/ICEBP: raises \#DB via the common software-interrupt C implementation.
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    /** @todo Does not generate \#UD on 286, or so they say...  Was allegedly a
     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
     * LOADALL memo.  Needs some testing. */
    IEMOP_HLP_MIN_386();
    /** @todo testcase! */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
}
12266
12267
/**
 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix: record the prefix and continue decoding with the next
 * opcode byte.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12285
12286
/**
 * @opcode 0xf3
 *
 * REPE/REPZ prefix: record the prefix and continue decoding with the next
 * opcode byte.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12304
12305
12306/**
12307 * @opcode 0xf4
12308 */
12309FNIEMOP_DEF(iemOp_hlt)
12310{
12311 IEMOP_MNEMONIC(hlt, "hlt");
12312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12313 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, iemCImpl_hlt);
12314}
12315
12316
/**
 * @opcode 0xf5
 *
 * CMC: complement the carry flag; no other flags are touched.
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12329
12330
/**
 * Body of 'inc/dec/not/neg Eb' - common worker macro.
 *
 * Handles the register form directly and the memory form in two flavours:
 * plain read-modify-write and LOCK prefixed (atomic worker).
 *
 * @param   a_bRm           The ModR/M byte.
 * @param   a_fnNormalU8    Assembly worker for the non-LOCK case.
 * @param   a_fnLockedU8    Assembly worker for the LOCK prefixed case.
 */
#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        /* register access */ \
        IEM_MC_BEGIN(2, 0, 0); \
        IEMOP_HLP_DONE_DECODING(); \
        IEM_MC_ARG(uint8_t *,   pu8Dst, 0); \
        IEM_MC_ARG(uint32_t *,  pEFlags, 1); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(2, 2, 0); \
            IEM_MC_ARG(uint8_t *,   pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* Identical to the above except the locked worker is called. */ \
            IEM_MC_BEGIN(2, 2, 0); \
            IEM_MC_ARG(uint8_t *,   pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
12391
12392
/**
 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
 *
 * Split-macro pattern: this half emits the register path and the non-LOCKed
 * memory path, and deliberately ends inside an open "else" block (the
 * trailing (void)0 eats the invocation's semicolon).  It MUST be followed by
 * an IEMOP_BODY_UNARY_Ev_LOCKED invocation, which supplies the LOCKed memory
 * path and closes the braces left open here.
 *
 * @param   a_fnNormalU16   Worker for the 16-bit operand size.
 * @param   a_fnNormalU32   Worker for the 32-bit operand size; the high dword
 *                          of the destination register is cleared explicitly
 *                          in the register path.
 * @param   a_fnNormalU64   Worker for the 64-bit operand size.
 */
#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(2, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
12516
/**
 * Body for the LOCK-prefixed memory variant of 'inc/dec/not/neg Ev'.
 *
 * Second half of the split-macro pattern: must directly follow an
 * IEMOP_BODY_UNARY_Ev invocation, whose open "else" / memory-target braces
 * this macro closes.  Uses IEMOP_HLP_DONE_DECODING (LOCK allowed) and the
 * locked workers instead of the plain ones.
 *
 * @param   a_fnLockedU16   Locked worker for the 16-bit operand size.
 * @param   a_fnLockedU32   Locked worker for the 32-bit operand size.
 * @param   a_fnLockedU64   Locked worker for the 64-bit operand size.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(2, 3, 0); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING(); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING(); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING(); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
} \
(void)0
12582
12583
/**
 * @opmaps grp3_f6
 * @opcode /0
 * @todo also /1
 *
 * TEST Eb,Ib - AND without writeback; only EFLAGS are updated, which is why
 * the memory operand is mapped read-only below.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(3, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 3, 0);
        IEM_MC_ARG(uint8_t const *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);

        /* cbImm=1: one immediate byte follows the ModR/M encoding. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Read-only mapping: test never writes the destination back. */
        IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12633
12634
/**
 * Opcode 0xf6 /4, /5, /6 and /7 - common worker for mul/imul/div/idiv Eb.
 *
 * All four byte-sized operations read the 8-bit operand and operate on AX;
 * the worker returns non-zero to signal \#DE (divide error / overflow).
 *
 * @param   bRm     The ModR/M byte.
 * @param   pfnU8   The 8-bit worker (mul/imul/div/idiv), returns status in rc.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEM_MC_BEGIN(3, 1, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero rc means the worker detected a divide error / overflow. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2, 0);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
12685
12686
/**
 * Opcode 0xf7 /4, /5, /6 and /7 - common worker for mul/imul/div/idiv Ev.
 *
 * Operand-size dependent variants operating on xDX:xAX; the worker returns
 * non-zero in rc to signal \#DE.  The 32-bit register path explicitly clears
 * the high dwords of RAX/RDX on success; the 64-bit workers write the full
 * registers so no clearing is needed there.
 *
 * @param   bRm     The ModR/M byte.
 * @param   pImpl   Table with the 16/32/64-bit workers.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 1, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_MIN_386);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit writes zero-extend; do it by hand since we wrote via refs. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2, 0);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_MIN_386);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_64BIT);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12857
12858
/**
 * @opmaps grp3_f6
 * @opcode /2
 *
 * NOT Eb - one's complement; does not modify EFLAGS.
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Eb, "not Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
}
12868
12869
12870/**
12871 * @opmaps grp3_f6
12872 * @opcode /3
12873 */
12874FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
12875{
12876 IEMOP_MNEMONIC(net_Eb, "neg Eb");
12877 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
12878}
12879
12880
/**
 * @opcode 0xf6
 *
 * Group 3 byte-sized dispatcher: routes on ModR/M.reg to
 * test/test/not/neg/mul/imul/div/idiv Eb.
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm); /* /1 is an alias of /0 (undocumented test). */
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12912
12913
/**
 * Opcode 0xf7 /0 - test Ev,Iv.
 *
 * Like the byte variant: EFLAGS-only, so memory operands are mapped
 * read-only and there is no register writeback (hence no high-dword
 * clearing in the 32-bit case).  The 64-bit immediate is a sign-extended
 * 32-bit value, per the usual Iv-in-64-bit-mode encoding.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0);
                IEM_MC_ARG(uint16_t const *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                /* cbImm=2: the 16-bit immediate follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386);
                IEM_MC_ARG(uint32_t const *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT);
                IEM_MC_ARG(uint64_t const *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                /* cbImm=4 also in 64-bit mode: the immediate is imm32 sign-extended. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13046
13047
/** Opcode 0xf7 /2 - not Ev (one's complement, EFLAGS untouched).
 * Note: the two body macros form one statement pair - the first leaves open
 * braces that the _LOCKED one closes. */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_not_u16, iemAImpl_not_u32, iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
13055
13056
/** Opcode 0xf7 /3 - neg Ev (two's complement negation). */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_neg_u16, iemAImpl_neg_u32, iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
13064
13065
/**
 * @opcode 0xf7
 *
 * Group 3 Ev dispatcher: routes on ModR/M.reg to
 * test/test/not/neg/mul/imul/div/idiv Ev.
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm); /* /1 is an alias of /0 (undocumented test). */
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13097
13098
/**
 * @opcode 0xf8
 *
 * CLC - clear the carry flag.
 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13111
13112
/**
 * @opcode 0xf9
 *
 * STC - set the carry flag.
 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13125
13126
/**
 * @opcode 0xfa
 *
 * CLI - deferred to the C implementation (privilege/VME checks); IRQs are
 * rechecked before the instruction per IEM_CIMPL_F_CHECK_IRQ_BEFORE.
 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, iemCImpl_cli);
}
13136
13137
/**
 * @opcode 0xfb
 *
 * STI - deferred to the C implementation; IRQs are rechecked after the
 * instruction per IEM_CIMPL_F_CHECK_IRQ_AFTER.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER | IEM_CIMPL_F_VMEXIT, iemCImpl_sti);
}
13144
13145
/**
 * @opcode 0xfc
 *
 * CLD - clear the direction flag.
 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13158
13159
/**
 * @opcode 0xfd
 *
 * STD - set the direction flag.
 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13172
13173
/**
 * @opmaps grp4
 * @opcode /0
 *
 * INC Eb - increment; CF is not affected (handled by the worker).
 */
FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Eb, "inc Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
}
13183
13184
/**
 * @opmaps grp4
 * @opcode /1
 *
 * DEC Eb - decrement; CF is not affected (handled by the worker).
 */
FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Eb, "dec Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
}
13194
13195
/**
 * @opcode 0xfe
 *
 * Group 4 dispatcher: only /0 (inc Eb) and /1 (dec Eb) are defined;
 * /2../7 raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
        default:
            /** @todo is the eff-addr decoded? */
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}
13212
/** Opcode 0xff /0 - inc Ev (CF unaffected; handled by the workers). */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_inc_u16, iemAImpl_inc_u32, iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
13220
13221
/** Opcode 0xff /1 - dec Ev (CF unaffected; handled by the workers). */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_dec_u16, iemAImpl_dec_u32, iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
13229
13230
/**
 * Opcode 0xff /2 - calln Ev (near indirect call).
 *
 * In 64-bit mode the default operand size is 64-bit and Intel ignores the
 * operand-size prefix (see the HLP macro below).  The call itself is done
 * by the iemCImpl_call_16/32/64 helpers.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from memory. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_64BIT);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13317
/**
 * Body for 'callf/jmpf Ep' (group 5 /3 and /5): loads a far pointer
 * (offset + 16-bit selector) from memory and defers to the given C helper.
 *
 * Register operands raise \#UD.  In 64-bit mode the default operand size is
 * 32-bit; only Intel honours REX.W here (AMD forces it back to 32-bit).
 *
 * @param   a_bRm       The ModR/M byte.
 * @param   a_fnCImpl   The C helper doing the far branch (iemCImpl_callf etc).
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint16_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR \
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
            \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint32_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR \
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
            \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint64_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE /* no gates */, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
            \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
13385
13386
13387/**
13388 * Opcode 0xff /3.
13389 * @param bRm The RM byte.
13390 */
13391FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
13392{
13393 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
13394 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf);
13395}
13396
13397
13398/**
13399 * Opcode 0xff /4.
13400 * @param bRm The RM byte.
13401 */
13402FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
13403{
13404 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
13405 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13406
13407 if (IEM_IS_MODRM_REG_MODE(bRm))
13408 {
13409 /* The new RIP is taken from a register. */
13410 switch (pVCpu->iem.s.enmEffOpSize)
13411 {
13412 case IEMMODE_16BIT:
13413 IEM_MC_BEGIN(0, 1, 0);
13414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13415 IEM_MC_LOCAL(uint16_t, u16Target);
13416 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13417 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
13418 IEM_MC_END();
13419 break;
13420
13421 case IEMMODE_32BIT:
13422 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
13423 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13424 IEM_MC_LOCAL(uint32_t, u32Target);
13425 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13426 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
13427 IEM_MC_END();
13428 break;
13429
13430 case IEMMODE_64BIT:
13431 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
13432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13433 IEM_MC_LOCAL(uint64_t, u64Target);
13434 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13435 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
13436 IEM_MC_END();
13437 break;
13438
13439 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13440 }
13441 }
13442 else
13443 {
13444 /* The new RIP is taken from a memory location. */
13445 switch (pVCpu->iem.s.enmEffOpSize)
13446 {
13447 case IEMMODE_16BIT:
13448 IEM_MC_BEGIN(0, 2, 0);
13449 IEM_MC_LOCAL(uint16_t, u16Target);
13450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13451 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13453 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13454 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
13455 IEM_MC_END();
13456 break;
13457
13458 case IEMMODE_32BIT:
13459 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386);
13460 IEM_MC_LOCAL(uint32_t, u32Target);
13461 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13462 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13464 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13465 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
13466 IEM_MC_END();
13467 break;
13468
13469 case IEMMODE_64BIT:
13470 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
13471 IEM_MC_LOCAL(uint64_t, u64Target);
13472 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13473 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13475 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13476 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
13477 IEM_MC_END();
13478 break;
13479
13480 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13481 }
13482 }
13483}
13484
13485
13486/**
13487 * Opcode 0xff /5.
13488 * @param bRm The RM byte.
13489 */
13490FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
13491{
13492 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
13493 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp);
13494}
13495
13496
13497/**
13498 * Opcode 0xff /6.
13499 * @param bRm The RM byte.
13500 */
13501FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
13502{
13503 IEMOP_MNEMONIC(push_Ev, "push Ev");
13504
13505 /* Registers are handled by a common worker. */
13506 if (IEM_IS_MODRM_REG_MODE(bRm))
13507 return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
13508
13509 /* Memory we do here. */
13510 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13511 switch (pVCpu->iem.s.enmEffOpSize)
13512 {
13513 case IEMMODE_16BIT:
13514 IEM_MC_BEGIN(0, 2, 0);
13515 IEM_MC_LOCAL(uint16_t, u16Src);
13516 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13519 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13520 IEM_MC_PUSH_U16(u16Src);
13521 IEM_MC_ADVANCE_RIP_AND_FINISH();
13522 IEM_MC_END();
13523 break;
13524
13525 case IEMMODE_32BIT:
13526 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386);
13527 IEM_MC_LOCAL(uint32_t, u32Src);
13528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13529 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13531 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13532 IEM_MC_PUSH_U32(u32Src);
13533 IEM_MC_ADVANCE_RIP_AND_FINISH();
13534 IEM_MC_END();
13535 break;
13536
13537 case IEMMODE_64BIT:
13538 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
13539 IEM_MC_LOCAL(uint64_t, u64Src);
13540 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13541 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13543 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13544 IEM_MC_PUSH_U64(u64Src);
13545 IEM_MC_ADVANCE_RIP_AND_FINISH();
13546 IEM_MC_END();
13547 break;
13548
13549 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13550 }
13551}
13552
13553
13554/**
13555 * @opcode 0xff
13556 */
13557FNIEMOP_DEF(iemOp_Grp5)
13558{
13559 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13560 switch (IEM_GET_MODRM_REG_8(bRm))
13561 {
13562 case 0:
13563 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
13564 case 1:
13565 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
13566 case 2:
13567 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
13568 case 3:
13569 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
13570 case 4:
13571 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
13572 case 5:
13573 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
13574 case 6:
13575 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
13576 case 7:
13577 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
13578 IEMOP_RAISE_INVALID_OPCODE_RET();
13579 }
13580 AssertFailedReturn(VERR_IEM_IPE_3);
13581}
13582
13583
13584
/**
 * One byte opcode dispatch table, indexed directly by the opcode byte
 * (0x00..0xff).  Forward declared (extern) at the top of this file.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_in_eAX_DX,        iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
13652
13653
13654/** @} */
13655
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette