VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 103513

Last change on this file since 103513 was 103513, checked in by vboxsync, 14 months ago

VMM/IEM: The vex3 & vex2 byte prefixes must set the IEM_OP_PRF_REX_R/X/B flags so IEM_GET_MODRM_EX will work correctly for vex instructions. bugref:10370

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 554.4 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 103513 2024-02-22 03:50:36Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
59/**
60 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
61 * memory/register as the destination.
 *
 * Decodes the ModR/M byte itself.  Register form: the destination register is
 * taken by reference and a LOCK prefix is rejected
 * (IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX).  Memory form: the destination
 * byte is mapped read/write -- or atomically when a LOCK prefix is in effect
 * and not disregarded via IEM_F_X86_DISREGARD_LOCK -- and EFLAGS are fetched
 * into a local so they can be committed only after the memory commit.
 *
 * @param   a_fnNormalU8    Worker for the plain (non-locked) byte operation.
 * @param   a_fnLockedU8    Worker for the LOCK-prefixed byte operation.
62 */
63#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8, a_fnLockedU8) \
64 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
65 \
66 /* \
67 * If rm is denoting a register, no more instruction bytes. \
68 */ \
69 if (IEM_IS_MODRM_REG_MODE(bRm)) \
70 { \
71 IEM_MC_BEGIN(3, 0, 0, 0); \
72 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
73 IEM_MC_ARG(uint8_t, u8Src, 1); \
74 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
75 \
76 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
77 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
78 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
79 IEM_MC_REF_EFLAGS(pEFlags); \
80 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
81 \
82 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
83 IEM_MC_END(); \
84 } \
85 else \
86 { \
87 /* \
88 * We're accessing memory. \
89 * Note! We're putting the eflags on the stack here so we can commit them \
90 * after the memory. \
91 */ \
92 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
93 { \
94 IEM_MC_BEGIN(3, 3, 0, 0); \
95 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
96 IEM_MC_ARG(uint8_t, u8Src, 1); \
97 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
98 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
99 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
100 \
101 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
102 IEMOP_HLP_DONE_DECODING(); \
103 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
104 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
105 IEM_MC_FETCH_EFLAGS(EFlags); \
106 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
107 \
108 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
109 IEM_MC_COMMIT_EFLAGS(EFlags); \
110 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
111 IEM_MC_END(); \
112 } \
113 else \
114 { \
115 IEM_MC_BEGIN(3, 3, 0, 0); \
116 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
117 IEM_MC_ARG(uint8_t, u8Src, 1); \
118 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
119 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
120 IEM_MC_LOCAL(uint8_t, bMapInfoDst); \
121 \
122 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
123 IEMOP_HLP_DONE_DECODING(); \
124 IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bMapInfoDst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
125 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
126 IEM_MC_FETCH_EFLAGS(EFlags); \
127 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
128 \
129 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bMapInfoDst); \
130 IEM_MC_COMMIT_EFLAGS(EFlags); \
131 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
132 IEM_MC_END(); \
133 } \
134 } \
135 (void)0
136
137/**
138 * Body for instructions like TEST & CMP, ++ with a byte memory/registers as
139 * operands.
 *
 * Decodes the ModR/M byte itself.  The destination operand is only read
 * (mapped read-only in the memory form), which is why a LOCK prefix is
 * rejected in both forms -- the memory form raises it explicitly via
 * IEMOP_RAISE_INVALID_LOCK_PREFIX_RET.
 *
 * @param   a_fnNormalU8    Worker for the byte operation.
140 */
141#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8) \
142 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
143 \
144 /* \
145 * If rm is denoting a register, no more instruction bytes. \
146 */ \
147 if (IEM_IS_MODRM_REG_MODE(bRm)) \
148 { \
149 IEM_MC_BEGIN(3, 0, 0, 0); \
150 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
151 IEM_MC_ARG(uint8_t, u8Src, 1); \
152 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
153 \
154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
155 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
156 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
157 IEM_MC_REF_EFLAGS(pEFlags); \
158 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
159 \
160 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
161 IEM_MC_END(); \
162 } \
163 else \
164 { \
165 /* \
166 * We're accessing memory. \
167 * Note! We're putting the eflags on the stack here so we can commit them \
168 * after the memory. \
169 */ \
170 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
171 { \
172 IEM_MC_BEGIN(3, 3, 0, 0); \
173 IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
174 IEM_MC_ARG(uint8_t, u8Src, 1); \
175 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
177 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
178 \
179 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
180 IEMOP_HLP_DONE_DECODING(); \
181 IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
182 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
183 IEM_MC_FETCH_EFLAGS(EFlags); \
184 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
185 \
186 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
187 IEM_MC_COMMIT_EFLAGS(EFlags); \
188 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
189 IEM_MC_END(); \
190 } \
191 else \
192 { \
193 IEMOP_HLP_DONE_DECODING(); \
194 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
195 } \
196 } \
197 (void)0
198
199/**
200 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
201 * destination.
 *
 * Decodes the ModR/M byte itself.  The ModR/M 'reg' operand is the
 * destination (taken by reference); the r/m operand is only read, so the
 * memory form uses a plain fetch and no mapping.  A LOCK prefix is rejected
 * in both forms (destination is a register).
 *
 * @param   a_fnNormalU8    Worker for the byte operation.
202 */
203#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
204 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
205 \
206 /* \
207 * If rm is denoting a register, no more instruction bytes. \
208 */ \
209 if (IEM_IS_MODRM_REG_MODE(bRm)) \
210 { \
211 IEM_MC_BEGIN(3, 0, 0, 0); \
212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
213 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
214 IEM_MC_ARG(uint8_t, u8Src, 1); \
215 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
216 \
217 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
218 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
219 IEM_MC_REF_EFLAGS(pEFlags); \
220 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
221 \
222 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
223 IEM_MC_END(); \
224 } \
225 else \
226 { \
227 /* \
228 * We're accessing memory. \
229 */ \
230 IEM_MC_BEGIN(3, 1, 0, 0); \
231 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
232 IEM_MC_ARG(uint8_t, u8Src, 1); \
233 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
234 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
235 \
236 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
238 IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
239 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
240 IEM_MC_REF_EFLAGS(pEFlags); \
241 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
242 \
243 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
244 IEM_MC_END(); \
245 } \
246 (void)0
247
248
249/**
250 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
251 * memory/register as the destination.
 *
 * Decodes the ModR/M byte itself and switches on the effective operand size
 * (16/32/64-bit).  The 32-bit register form clears the high half of the
 * destination GPR (IEM_MC_CLEAR_HIGH_GREG_U64).
 *
 * Note!  This macro deliberately ends inside the "LOCK prefix present" else
 * branch with braces still open; it must be followed immediately by
 * IEMOP_BODY_BINARY_rm_rv_LOCKED, which supplies the atomic body and the
 * closing braces (split to work around an IEMAllInstPython.py limitation).
 *
 * @param   a_fnNormalU16   Worker for the non-locked 16-bit operation.
 * @param   a_fnNormalU32   Worker for the non-locked 32-bit operation.
 * @param   a_fnNormalU64   Worker for the non-locked 64-bit operation.
252 */
253#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
254 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
255 \
256 /* \
257 * If rm is denoting a register, no more instruction bytes. \
258 */ \
259 if (IEM_IS_MODRM_REG_MODE(bRm)) \
260 { \
261 switch (pVCpu->iem.s.enmEffOpSize) \
262 { \
263 case IEMMODE_16BIT: \
264 IEM_MC_BEGIN(3, 0, 0, 0); \
265 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
266 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
267 IEM_MC_ARG(uint16_t, u16Src, 1); \
268 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
269 \
270 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
271 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
272 IEM_MC_REF_EFLAGS(pEFlags); \
273 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
274 \
275 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
276 IEM_MC_END(); \
277 break; \
278 \
279 case IEMMODE_32BIT: \
280 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
282 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
283 IEM_MC_ARG(uint32_t, u32Src, 1); \
284 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
285 \
286 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
287 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
288 IEM_MC_REF_EFLAGS(pEFlags); \
289 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
290 \
291 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
292 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
293 IEM_MC_END(); \
294 break; \
295 \
296 case IEMMODE_64BIT: \
297 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
298 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
299 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
300 IEM_MC_ARG(uint64_t, u64Src, 1); \
301 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
302 \
303 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
304 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
305 IEM_MC_REF_EFLAGS(pEFlags); \
306 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
307 \
308 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
309 IEM_MC_END(); \
310 break; \
311 \
312 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
313 } \
314 } \
315 else \
316 { \
317 /* \
318 * We're accessing memory. \
319 * Note! We're putting the eflags on the stack here so we can commit them \
320 * after the memory. \
321 */ \
322 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
323 { \
324 switch (pVCpu->iem.s.enmEffOpSize) \
325 { \
326 case IEMMODE_16BIT: \
327 IEM_MC_BEGIN(3, 3, 0, 0); \
328 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
329 IEM_MC_ARG(uint16_t, u16Src, 1); \
330 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
331 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
332 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
333 \
334 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
335 IEMOP_HLP_DONE_DECODING(); \
336 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
337 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
338 IEM_MC_FETCH_EFLAGS(EFlags); \
339 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
340 \
341 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
342 IEM_MC_COMMIT_EFLAGS(EFlags); \
343 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
344 IEM_MC_END(); \
345 break; \
346 \
347 case IEMMODE_32BIT: \
348 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
349 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
350 IEM_MC_ARG(uint32_t, u32Src, 1); \
351 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
352 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
353 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
354 \
355 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
356 IEMOP_HLP_DONE_DECODING(); \
357 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
358 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
359 IEM_MC_FETCH_EFLAGS(EFlags); \
360 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
361 \
362 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
363 IEM_MC_COMMIT_EFLAGS(EFlags); \
364 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
365 IEM_MC_END(); \
366 break; \
367 \
368 case IEMMODE_64BIT: \
369 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
370 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
371 IEM_MC_ARG(uint64_t, u64Src, 1); \
372 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
373 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
374 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
375 \
376 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
377 IEMOP_HLP_DONE_DECODING(); \
378 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
379 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
380 IEM_MC_FETCH_EFLAGS(EFlags); \
381 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
382 \
383 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
384 IEM_MC_COMMIT_EFLAGS(EFlags); \
385 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
386 IEM_MC_END(); \
387 break; \
388 \
389 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
390 } \
391 } \
392 else \
393 { \
394 (void)0
395/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * LOCK-prefixed continuation of IEMOP_BODY_BINARY_rm_rv_RW: maps the memory
 * destination atomically, calls the locked workers, and supplies the closing
 * braces left open by the RW macro.  Must immediately follow an
 * IEMOP_BODY_BINARY_rm_rv_RW invocation.
 *
 * @param   a_fnLockedU16   Worker for the locked 16-bit operation.
 * @param   a_fnLockedU32   Worker for the locked 32-bit operation.
 * @param   a_fnLockedU64   Worker for the locked 64-bit operation.
 */
396#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
397 switch (pVCpu->iem.s.enmEffOpSize) \
398 { \
399 case IEMMODE_16BIT: \
400 IEM_MC_BEGIN(3, 3, 0, 0); \
401 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
402 IEM_MC_ARG(uint16_t, u16Src, 1); \
403 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
404 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
405 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
406 \
407 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
408 IEMOP_HLP_DONE_DECODING(); \
409 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
410 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
411 IEM_MC_FETCH_EFLAGS(EFlags); \
412 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
413 \
414 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
415 IEM_MC_COMMIT_EFLAGS(EFlags); \
416 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
417 IEM_MC_END(); \
418 break; \
419 \
420 case IEMMODE_32BIT: \
421 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
422 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
423 IEM_MC_ARG(uint32_t, u32Src, 1); \
424 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
425 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
426 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
427 \
428 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
429 IEMOP_HLP_DONE_DECODING(); \
430 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
431 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
432 IEM_MC_FETCH_EFLAGS(EFlags); \
433 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
434 \
435 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo /* CMP,TEST */); \
436 IEM_MC_COMMIT_EFLAGS(EFlags); \
437 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
438 IEM_MC_END(); \
439 break; \
440 \
441 case IEMMODE_64BIT: \
442 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
443 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
444 IEM_MC_ARG(uint64_t, u64Src, 1); \
445 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
447 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
448 \
449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
450 IEMOP_HLP_DONE_DECODING(); \
451 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
452 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
453 IEM_MC_FETCH_EFLAGS(EFlags); \
454 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
455 \
456 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
457 IEM_MC_COMMIT_EFLAGS(EFlags); \
458 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
459 IEM_MC_END(); \
460 break; \
461 \
462 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
463 } \
464 } \
465 } \
466 (void)0
467
468/**
469 * Body for read-only word/dword/qword instructions like TEST and CMP with
470 * memory/register as the destination.
 *
 * Decodes the ModR/M byte itself and switches on the effective operand size.
 * The destination is only read (mapped read-only in the memory form), so a
 * LOCK prefix is rejected with IEMOP_RAISE_INVALID_LOCK_PREFIX_RET unless
 * IEM_F_X86_DISREGARD_LOCK is set.
 *
 * @param   a_fnNormalU16   Worker for the 16-bit operation.
 * @param   a_fnNormalU32   Worker for the 32-bit operation.
 * @param   a_fnNormalU64   Worker for the 64-bit operation.
471 */
472#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
473 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
474 \
475 /* \
476 * If rm is denoting a register, no more instruction bytes. \
477 */ \
478 if (IEM_IS_MODRM_REG_MODE(bRm)) \
479 { \
480 switch (pVCpu->iem.s.enmEffOpSize) \
481 { \
482 case IEMMODE_16BIT: \
483 IEM_MC_BEGIN(3, 0, 0, 0); \
484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
485 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
486 IEM_MC_ARG(uint16_t, u16Src, 1); \
487 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
488 \
489 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
490 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
491 IEM_MC_REF_EFLAGS(pEFlags); \
492 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
493 \
494 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
495 IEM_MC_END(); \
496 break; \
497 \
498 case IEMMODE_32BIT: \
499 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
501 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
502 IEM_MC_ARG(uint32_t, u32Src, 1); \
503 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
504 \
505 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
506 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
507 IEM_MC_REF_EFLAGS(pEFlags); \
508 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
509 \
510 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
511 IEM_MC_END(); \
512 break; \
513 \
514 case IEMMODE_64BIT: \
515 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
517 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
518 IEM_MC_ARG(uint64_t, u64Src, 1); \
519 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
520 \
521 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
522 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
523 IEM_MC_REF_EFLAGS(pEFlags); \
524 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
525 \
526 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
527 IEM_MC_END(); \
528 break; \
529 \
530 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
531 } \
532 } \
533 else \
534 { \
535 /* \
536 * We're accessing memory. \
537 * Note! We're putting the eflags on the stack here so we can commit them \
538 * after the memory. \
539 */ \
540 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
541 { \
542 switch (pVCpu->iem.s.enmEffOpSize) \
543 { \
544 case IEMMODE_16BIT: \
545 IEM_MC_BEGIN(3, 3, 0, 0); \
546 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
547 IEM_MC_ARG(uint16_t, u16Src, 1); \
548 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
549 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
550 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
551 \
552 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
553 IEMOP_HLP_DONE_DECODING(); \
554 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
555 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
556 IEM_MC_FETCH_EFLAGS(EFlags); \
557 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
558 \
559 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
560 IEM_MC_COMMIT_EFLAGS(EFlags); \
561 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
562 IEM_MC_END(); \
563 break; \
564 \
565 case IEMMODE_32BIT: \
566 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
567 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
568 IEM_MC_ARG(uint32_t, u32Src, 1); \
569 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
570 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
571 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
572 \
573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
574 IEMOP_HLP_DONE_DECODING(); \
575 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
576 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
577 IEM_MC_FETCH_EFLAGS(EFlags); \
578 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
579 \
580 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
581 IEM_MC_COMMIT_EFLAGS(EFlags); \
582 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
583 IEM_MC_END(); \
584 break; \
585 \
586 case IEMMODE_64BIT: \
587 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
588 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
589 IEM_MC_ARG(uint64_t, u64Src, 1); \
590 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
591 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
592 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
593 \
594 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
595 IEMOP_HLP_DONE_DECODING(); \
596 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
597 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
598 IEM_MC_FETCH_EFLAGS(EFlags); \
599 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
600 \
601 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
602 IEM_MC_COMMIT_EFLAGS(EFlags); \
603 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
604 IEM_MC_END(); \
605 break; \
606 \
607 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
608 } \
609 } \
610 else \
611 { \
612 IEMOP_HLP_DONE_DECODING(); \
613 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
614 } \
615 } \
616 (void)0
617
618
619/**
620 * Body for instructions like ADD, AND, OR, ++ with working on AL with
621 * a byte immediate.
 *
 * Fetches the immediate byte itself and operates on AL by reference; a LOCK
 * prefix is rejected.  Note!  Ends with IEM_MC_END() without a trailing
 * semicolon -- the invocation site supplies it.
 *
 * @param   a_fnNormalU8    Worker for the byte operation.
622 */
623#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
624 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
625 \
626 IEM_MC_BEGIN(3, 0, 0, 0); \
627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
628 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
629 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1); \
630 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
631 \
632 IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
633 IEM_MC_REF_EFLAGS(pEFlags); \
634 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
635 \
636 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
637 IEM_MC_END()
638
639/**
640 * Body for instructions like ADD, AND, OR, ++ with working on
641 * AX/EAX/RAX with a word/dword immediate.
 *
 * Fetches the immediate itself (in 64-bit mode a 32-bit immediate is
 * sign-extended to 64 bits) and operates on AX/EAX/RAX by reference; a LOCK
 * prefix is rejected.
 *
 * @param   a_fnNormalU16       Worker for the 16-bit operation.
 * @param   a_fnNormalU32       Worker for the 32-bit operation.
 * @param   a_fnNormalU64       Worker for the 64-bit operation.
 * @param   a_fModifiesDstReg   Non-zero if the worker writes the destination
 *                              register; when set, the 32-bit case clears the
 *                              high half of RAX (IEM_MC_CLEAR_HIGH_GREG_U64).
642 */
643#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
644 switch (pVCpu->iem.s.enmEffOpSize) \
645 { \
646 case IEMMODE_16BIT: \
647 { \
648 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
649 \
650 IEM_MC_BEGIN(3, 0, 0, 0); \
651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
652 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
653 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1); \
654 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
655 \
656 IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
657 IEM_MC_REF_EFLAGS(pEFlags); \
658 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
659 \
660 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
661 IEM_MC_END(); \
662 } \
663 \
664 case IEMMODE_32BIT: \
665 { \
666 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
667 \
668 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
670 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
671 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1); \
672 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
673 \
674 IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
675 IEM_MC_REF_EFLAGS(pEFlags); \
676 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
677 \
678 if (a_fModifiesDstReg) \
679 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
680 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
681 IEM_MC_END(); \
682 } \
683 \
684 case IEMMODE_64BIT: \
685 { \
686 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
687 \
688 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
689 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
690 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
691 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1); \
692 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
693 \
694 IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
695 IEM_MC_REF_EFLAGS(pEFlags); \
696 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
697 \
698 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
699 IEM_MC_END(); \
700 } \
701 \
702 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
703 } \
704 (void)0
705
706
707
708/* Instruction specification format - work in progress: */
709
710/**
711 * @opcode 0x00
712 * @opmnemonic add
713 * @op1 rm:Eb
714 * @op2 reg:Gb
715 * @opmaps one
716 * @openc ModR/M
717 * @opflclass arithmetic
718 * @ophints harmless ignores_op_sizes
719 * @opstats add_Eb_Gb
720 * @opgroup og_gen_arith_bin
721 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
722 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
723 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
724 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
725 */
726FNIEMOP_DEF(iemOp_add_Eb_Gb)
727{
 728 IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
 /* ADD r/m8, r8 -- supplies both the plain and the LOCK-prefixed workers. */
 729 IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_add_u8, iemAImpl_add_u8_locked);
730}
731
732
733/**
734 * @opcode 0x01
735 * @opgroup og_gen_arith_bin
736 * @opflclass arithmetic
737 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
738 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
739 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
740 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
741 */
742FNIEMOP_DEF(iemOp_add_Ev_Gv)
743{
 744 IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
 /* ADD r/m16/32/64, r16/32/64 -- the _RW body is completed by the _LOCKED macro (shared brace structure). */
 745 IEMOP_BODY_BINARY_rm_rv_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
 746 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
747}
748
749
750/**
751 * @opcode 0x02
752 * @opgroup og_gen_arith_bin
753 * @opflclass arithmetic
754 * @opcopytests iemOp_add_Eb_Gb
755 */
756FNIEMOP_DEF(iemOp_add_Gb_Eb)
757{
 758 IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
 /* ADD r8, r/m8 -- register destination, so no locked variant. */
 759 IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
760}
761
762
763/**
764 * @opcode 0x03
765 * @opgroup og_gen_arith_bin
766 * @opflclass arithmetic
767 * @opcopytests iemOp_add_Ev_Gv
768 */
769FNIEMOP_DEF(iemOp_add_Gv_Ev)
770{
 771 IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
 /* ADD r16/32/64, r/m16/32/64.  NOTE(review): trailing args 1, 0 belong to
 IEMOP_BODY_BINARY_rv_rm (defined elsewhere) -- confirm their meaning there. */
 772 IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1, 0);
773}
774
775
776/**
777 * @opcode 0x04
778 * @opgroup og_gen_arith_bin
779 * @opflclass arithmetic
780 * @opcopytests iemOp_add_Eb_Gb
781 */
782FNIEMOP_DEF(iemOp_add_Al_Ib)
783{
 784 IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
 /* ADD AL, imm8. */
 785 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
786}
787
788
789/**
790 * @opcode 0x05
791 * @opgroup og_gen_arith_bin
792 * @opflclass arithmetic
793 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
794 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
795 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
796 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
797 */
798FNIEMOP_DEF(iemOp_add_eAX_Iz)
799{
 800 IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
 /* ADD rAX, imm16/32 -- final arg 1 = a_fModifiesDstReg (32-bit op clears high RAX half). */
 801 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
802}
803
804
805/**
806 * @opcode 0x06
807 * @opgroup og_stack_sreg
808 */
809FNIEMOP_DEF(iemOp_push_ES)
810{
 811 IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
 /* Invalid in 64-bit mode; otherwise defers to the common segment-register push helper. */
 812 IEMOP_HLP_NO_64BIT();
 813 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
814}
815
816
817/**
818 * @opcode 0x07
819 * @opgroup og_stack_sreg
820 */
821FNIEMOP_DEF(iemOp_pop_ES)
822{
 823 IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
 /* Invalid in 64-bit mode.  Deferred to the C implementation; the mask below
 lists the guest registers it may modify: RSP plus all the ES selector,
 base, limit and attribute fields. */
 824 IEMOP_HLP_NO_64BIT();
 825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 826 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
 827 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
 828 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
 829 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
 830 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
 831 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES),
 832 iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
833}
834
835
836/**
837 * @opcode 0x08
838 * @opgroup og_gen_arith_bin
839 * @opflclass logical
840 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
841 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
842 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
843 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
844 */
845FNIEMOP_DEF(iemOp_or_Eb_Gb)
846{
 847 IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
 /* OR r/m8, r8 -- AF is declared undefined for the verifier. */
 848 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
 849 IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_or_u8, iemAImpl_or_u8_locked);
850}
851
852
853/**
854 * @opcode 0x09
855 * @opgroup og_gen_arith_bin
856 * @opflclass logical
857 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
858 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
859 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
860 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
861 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
862 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
863 * @note AF is documented as undefined, but both modern AMD and Intel CPUs clears it.
864 */
865FNIEMOP_DEF(iemOp_or_Ev_Gv)
866{
 867 IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
 /* OR r/m16/32/64, r16/32/64 -- the _RW body is completed by the _LOCKED macro. */
 868 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
 869 IEMOP_BODY_BINARY_rm_rv_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
 870 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
871}
872
873
874/**
875 * @opcode 0x0a
876 * @opgroup og_gen_arith_bin
877 * @opflclass logical
878 * @opcopytests iemOp_or_Eb_Gb
879 */
880FNIEMOP_DEF(iemOp_or_Gb_Eb)
881{
 882 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
 /* OR r8, r/m8 -- register destination. */
 883 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
 884 IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
885}
886
887
888/**
889 * @opcode 0x0b
890 * @opgroup og_gen_arith_bin
891 * @opflclass logical
892 * @opcopytests iemOp_or_Ev_Gv
893 */
894FNIEMOP_DEF(iemOp_or_Gv_Ev)
895{
 896 IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
 /* OR r16/32/64, r/m16/32/64 -- trailing args 1, 0 per IEMOP_BODY_BINARY_rv_rm (defined elsewhere). */
 897 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
 898 IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1, 0);
899}
900
901
902/**
903 * @opcode 0x0c
904 * @opgroup og_gen_arith_bin
905 * @opflclass logical
906 * @opcopytests iemOp_or_Eb_Gb
907 */
908FNIEMOP_DEF(iemOp_or_Al_Ib)
909{
 910 IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
 /* OR AL, imm8. */
 911 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
 912 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
913}
914
915
916/**
917 * @opcode 0x0d
918 * @opgroup og_gen_arith_bin
919 * @opflclass logical
920 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
921 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
922 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
923 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
924 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
925 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
926 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
927 */
928FNIEMOP_DEF(iemOp_or_eAX_Iz)
929{
 930 IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
 /* OR rAX, imm16/32 -- final arg 1 = a_fModifiesDstReg (32-bit op clears high RAX half). */
 931 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
 932 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
933}
934
935
936/**
937 * @opcode 0x0e
938 * @opgroup og_stack_sreg
939 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* Invalid in 64-bit mode (raises #UD there via IEMOP_HLP_NO_64BIT);
       otherwise pushes CS through the common segment-register helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
946
947
948/**
949 * @opcode 0x0f
950 * @opmnemonic EscTwo0f
951 * @openc two0f
952 * @opdisenum OP_2B_ESC
953 * @ophints harmless
954 * @opgroup og_escapes
955 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        /* Four table entries per opcode, selected by the prefix index
           (idxPrefix) - see the assertions above for an example. */
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_CS),
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
993
994/**
995 * @opcode 0x10
996 * @opgroup og_gen_arith_bin
997 * @opflclass arithmetic_carry
998 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
999 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1000 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1001 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1002 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1003 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* r/m8 destination ('MR' form); second worker handles the LOCK prefix. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_adc_u8, iemAImpl_adc_u8_locked);
}
1009
1010
1011/**
1012 * @opcode 0x11
1013 * @opgroup og_gen_arith_bin
1014 * @opflclass arithmetic_carry
1015 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1016 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1017 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1018 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1019 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1020 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* r/m16/32/64 destination; separate plain and LOCK'ed worker sets. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1027
1028
1029/**
1030 * @opcode 0x12
1031 * @opgroup og_gen_arith_bin
1032 * @opflclass arithmetic_carry
1033 * @opcopytests iemOp_adc_Eb_Gb
1034 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* reg8 destination ('RM' form); no LOCK variant for register destinations. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
1040
1041
1042/**
1043 * @opcode 0x13
1044 * @opgroup og_gen_arith_bin
1045 * @opflclass arithmetic_carry
1046 * @opcopytests iemOp_adc_Ev_Gv
1047 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* reg16/32/64 destination ('RM' form). */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1, 0);
}
1053
1054
1055/**
1056 * @opcode 0x14
1057 * @opgroup og_gen_arith_bin
1058 * @opflclass arithmetic_carry
1059 * @opcopytests iemOp_adc_Eb_Gb
1060 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* Fixed AL destination with an 8-bit immediate. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
1066
1067
1068/**
1069 * @opcode 0x15
1070 * @opgroup og_gen_arith_bin
1071 * @opflclass arithmetic_carry
1072 * @opcopytests iemOp_adc_Ev_Gv
1073 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* Fixed rAX destination with an operand-size immediate (Iz). */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1079
1080
1081/**
1082 * @opcode 0x16
1083 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* Invalid in 64-bit mode; pushes SS via the common segment-register helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1090
1091
1092/**
1093 * @opcode 0x17
1094 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* Pops into SS; flagged to inhibit interrupt delivery for the following
       instruction (IEM_CIMPL_F_INHIBIT_SHADOW).  Invalid in 64-bit mode.
       The deferred cImpl call may touch rSP and all the SS hidden fields. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_SS),
                                iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1108
1109
1110/**
1111 * @opcode 0x18
1112 * @opgroup og_gen_arith_bin
1113 * @opflclass arithmetic_carry
1114 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* r/m8 destination ('MR' form); second worker handles the LOCK prefix. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_sbb_u8, iemAImpl_sbb_u8_locked);
}
1120
1121
1122/**
1123 * @opcode 0x19
1124 * @opgroup og_gen_arith_bin
1125 * @opflclass arithmetic_carry
1126 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* r/m16/32/64 destination; separate plain and LOCK'ed worker sets. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1133
1134
1135/**
1136 * @opcode 0x1a
1137 * @opgroup og_gen_arith_bin
1138 * @opflclass arithmetic_carry
1139 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* reg8 destination ('RM' form). */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
1145
1146
1147/**
1148 * @opcode 0x1b
1149 * @opgroup og_gen_arith_bin
1150 * @opflclass arithmetic_carry
1151 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* reg16/32/64 destination ('RM' form). */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1, 0);
}
1157
1158
1159/**
1160 * @opcode 0x1c
1161 * @opgroup og_gen_arith_bin
1162 * @opflclass arithmetic_carry
1163 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* Fixed AL destination with an 8-bit immediate. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
1169
1170
1171/**
1172 * @opcode 0x1d
1173 * @opgroup og_gen_arith_bin
1174 * @opflclass arithmetic_carry
1175 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* Fixed rAX destination with an operand-size immediate (Iz). */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1181
1182
1183/**
1184 * @opcode 0x1e
1185 * @opgroup og_stack_sreg
1186 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* Invalid in 64-bit mode; pushes DS via the common segment-register helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1193
1194
1195/**
1196 * @opcode 0x1f
1197 * @opgroup og_stack_sreg
1198 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* Pops into DS; invalid in 64-bit mode.  Unlike POP SS there is no
       interrupt-shadow flag.  The deferred cImpl call may touch rSP and all
       the DS hidden fields. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS),
                                iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1212
1213
1214/**
1215 * @opcode 0x20
1216 * @opgroup og_gen_arith_bin
1217 * @opflclass logical
1218 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* r/m8 destination ('MR' form); AND leaves AF undefined. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_and_u8, iemAImpl_and_u8_locked);
}
1225
1226
1227/**
1228 * @opcode 0x21
1229 * @opgroup og_gen_arith_bin
1230 * @opflclass logical
1231 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* r/m16/32/64 destination; plain and LOCK'ed worker sets; AF undefined. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1239
1240
1241/**
1242 * @opcode 0x22
1243 * @opgroup og_gen_arith_bin
1244 * @opflclass logical
1245 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* reg8 destination ('RM' form); AF undefined after AND. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1252
1253
1254/**
1255 * @opcode 0x23
1256 * @opgroup og_gen_arith_bin
1257 * @opflclass logical
1258 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* reg16/32/64 destination ('RM' form); AF undefined after AND. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1, 0);
}
1265
1266
1267/**
1268 * @opcode 0x24
1269 * @opgroup og_gen_arith_bin
1270 * @opflclass logical
1271 */
1272FNIEMOP_DEF(iemOp_and_Al_Ib)
1273{
1274 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1275 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1276 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1277}
1278
1279
1280/**
1281 * @opcode 0x25
1282 * @opgroup og_gen_arith_bin
1283 * @opflclass logical
1284 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* Fixed rAX destination with an operand-size immediate (Iz); AF undefined. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1291
1292
1293/**
1294 * @opcode 0x26
1295 * @opmnemonic SEG
1296 * @op1 ES
1297 * @opgroup og_prefix
1298 * @openc prefix
1299 * @opdisenum OP_SEG
1300 * @ophints harmless
1301 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record it as the effective segment and
       tail-decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1311
1312
1313/**
1314 * @opcode 0x27
1315 * @opfltest af,cf
1316 * @opflmodify cf,pf,af,zf,sf,of
1317 * @opflundef of
1318 */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after DAA */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_daa);
}
1327
1328
1329/**
1330 * @opcode 0x28
1331 * @opgroup og_gen_arith_bin
1332 * @opflclass arithmetic
1333 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* r/m8 destination ('MR' form); second worker handles the LOCK prefix. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_sub_u8, iemAImpl_sub_u8_locked);
}
1339
1340
1341/**
1342 * @opcode 0x29
1343 * @opgroup og_gen_arith_bin
1344 * @opflclass arithmetic
1345 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* r/m16/32/64 destination; separate plain and LOCK'ed worker sets. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1352
1353
1354/**
1355 * @opcode 0x2a
1356 * @opgroup og_gen_arith_bin
1357 * @opflclass arithmetic
1358 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* reg8 destination ('RM' form). */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1364
1365
1366/**
1367 * @opcode 0x2b
1368 * @opgroup og_gen_arith_bin
1369 * @opflclass arithmetic
1370 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* reg16/32/64 destination ('RM' form). */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1, 0);
}
1376
1377
1378/**
1379 * @opcode 0x2c
1380 * @opgroup og_gen_arith_bin
1381 * @opflclass arithmetic
1382 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* Fixed AL destination with an 8-bit immediate. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1388
1389
1390/**
1391 * @opcode 0x2d
1392 * @opgroup og_gen_arith_bin
1393 * @opflclass arithmetic
1394 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* Fixed rAX destination with an operand-size immediate (Iz). */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1400
1401
1402/**
1403 * @opcode 0x2e
1404 * @opmnemonic SEG
1405 * @op1 CS
1406 * @opgroup og_prefix
1407 * @openc prefix
1408 * @opdisenum OP_SEG
1409 * @ophints harmless
1410 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record it as the effective segment and
       tail-decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1420
1421
1422/**
1423 * @opcode 0x2f
1424 * @opfltest af,cf
1425 * @opflmodify cf,pf,af,zf,sf,of
1426 * @opflundef of
1427 */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after DAS */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_das);
}
1436
1437
1438/**
1439 * @opcode 0x30
1440 * @opgroup og_gen_arith_bin
1441 * @opflclass logical
1442 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* r/m8 destination ('MR' form); XOR leaves AF undefined. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_xor_u8, iemAImpl_xor_u8_locked);
}
1449
1450
1451/**
1452 * @opcode 0x31
1453 * @opgroup og_gen_arith_bin
1454 * @opflclass logical
1455 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* r/m16/32/64 destination; plain and LOCK'ed worker sets; AF undefined. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1463
1464
1465/**
1466 * @opcode 0x32
1467 * @opgroup og_gen_arith_bin
1468 * @opflclass logical
1469 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* reg8 destination ('RM' form); AF undefined after XOR. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1476
1477
1478/**
1479 * @opcode 0x33
1480 * @opgroup og_gen_arith_bin
1481 * @opflclass logical
1482 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* reg16/32/64 destination ('RM' form); AF undefined after XOR. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1, 0);
}
1489
1490
1491/**
1492 * @opcode 0x34
1493 * @opgroup og_gen_arith_bin
1494 * @opflclass logical
1495 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* Fixed AL destination with an 8-bit immediate; AF undefined after XOR. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1502
1503
1504/**
1505 * @opcode 0x35
1506 * @opgroup og_gen_arith_bin
1507 * @opflclass logical
1508 */
1509FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1510{
1511 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1512 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1513 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1514}
1515
1516
1517/**
1518 * @opcode 0x36
1519 * @opmnemonic SEG
1520 * @op1 SS
1521 * @opgroup og_prefix
1522 * @openc prefix
1523 * @opdisenum OP_SEG
1524 * @ophints harmless
1525 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record it as the effective segment and
       tail-decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1535
1536
1537/**
1538 * @opcode 0x37
1539 * @opfltest af
1540 * @opflmodify cf,pf,af,zf,sf,of
1541 * @opflundef pf,zf,sf,of
1542 * @opgroup og_gen_arith_dec
1543 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1544 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1545 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1546 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1547 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1548 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1549 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1550 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1551 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1552 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1553 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1554 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1555 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1556 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1557 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1558 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1559 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1560 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1561 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1562 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1563 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1564 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1565 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1566 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1567 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1568 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1569 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1570 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1571 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1572 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1573 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1574 */
FNIEMOP_DEF(iemOp_aaa)
{
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after AAA */

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aaa);
}
1584
1585
1586/**
1587 * @opcode 0x38
1588 * @opflclass arithmetic
1589 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP only reads its operands, hence the read-only (RO) body variant. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8);
}
1595
1596
1597/**
1598 * @opcode 0x39
1599 * @opflclass arithmetic
1600 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP only reads its operands, hence the read-only (RO) body variant. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
1606
1607
1608/**
1609 * @opcode 0x3a
1610 * @opflclass arithmetic
1611 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* reg8 first operand ('RM' form); only EFLAGS are updated. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1617
1618
1619/**
1620 * @opcode 0x3b
1621 * @opflclass arithmetic
1622 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* reg16/32/64 first operand; note the 0 'modifies register' argument,
       unlike or/adc/sbb/and/sub/xor which pass 1. */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0, 0);
}
1628
1629
1630/**
1631 * @opcode 0x3c
1632 * @opflclass arithmetic
1633 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* Compares AL with an 8-bit immediate; only EFLAGS are updated. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1639
1640
1641/**
1642 * @opcode 0x3d
1643 * @opflclass arithmetic
1644 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* Compares rAX with an operand-size immediate (Iz); note the 0
       'modifies register' argument - only EFLAGS are updated. */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1650
1651
1652/**
1653 * @opcode 0x3e
1654 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record it as the effective segment and
       tail-decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1664
1665
1666/**
1667 * @opcode 0x3f
1668 * @opfltest af
1669 * @opflmodify cf,pf,af,zf,sf,of
1670 * @opflundef pf,zf,sf,of
1671 * @opgroup og_gen_arith_dec
1672 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1673 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1674 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1675 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1676 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1677 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1678 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1679 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1680 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1681 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1682 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1683 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1684 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1685 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1686 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1687 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1688 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1689 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1690 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1691 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1692 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1693 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1694 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1695 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1696 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1697 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1698 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1699 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1700 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1701 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1702 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1703 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1704 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1705 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1706 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1707 */
1708FNIEMOP_DEF(iemOp_aas)
1709{
1710 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1711 IEMOP_HLP_NO_64BIT();
1712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1713 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1714
1715 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aas);
1716}
1717
1718
1719/**
1720 * Common 'inc/dec register' helper.
1721 *
1722 * Not for 64-bit code, only for what became the rex prefixes.
1723 */
1724#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
1725 switch (pVCpu->iem.s.enmEffOpSize) \
1726 { \
1727 case IEMMODE_16BIT: \
1728 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_64BIT, 0); \
1729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1730 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
1731 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1732 IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
1733 IEM_MC_REF_EFLAGS(pEFlags); \
1734 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
1735 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1736 IEM_MC_END(); \
1737 break; \
1738 \
1739 case IEMMODE_32BIT: \
1740 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
1741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1742 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
1743 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1744 IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
1745 IEM_MC_REF_EFLAGS(pEFlags); \
1746 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
1747 IEM_MC_CLEAR_HIGH_GREG_U64(a_iReg); \
1748 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1749 IEM_MC_END(); \
1750 break; \
1751 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1752 } \
1753 (void)0
1754
1755/**
1756 * @opcode 0x40
1757 * @opflclass incdec
1758 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * In 64-bit mode 0x40 is the plain REX prefix (no W/R/X/B bits):
     * record it and decode the next opcode byte.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1776
1777
1778/**
1779 * @opcode 0x41
1780 * @opflclass incdec
1781 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * In 64-bit mode 0x41 is the REX.B prefix: record it, extend the
     * ModR/M r/m (base) register, and decode the next opcode byte.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1800
1801
1802/**
1803 * @opcode 0x42
1804 * @opflclass incdec
1805 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * In 64-bit mode 0x42 is the REX.X prefix: record it, extend the
     * SIB index register, and decode the next opcode byte.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1824
1825
1826
1827/**
1828 * @opcode 0x43
1829 * @opflclass incdec
1830 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * In 64-bit mode 0x43 is the REX.BX prefix: record it, extend both the
     * r/m (base) and SIB index registers, and decode the next opcode byte.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1850
1851
1852/**
1853 * @opcode 0x44
1854 * @opflclass incdec
1855 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * In 64-bit mode 0x44 is the REX.R prefix: record it, extend the
     * ModR/M reg register, and decode the next opcode byte.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1874
1875
1876/**
1877 * @opcode 0x45
1878 * @opflclass incdec
1879 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * In 64-bit mode 0x45 is the REX.RB prefix: record it, extend the
     * ModR/M reg and r/m (base) registers, and decode the next opcode byte.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1899
1900
1901/**
1902 * @opcode 0x46
1903 * @opflclass incdec
1904 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * In 64-bit mode 0x46 is the REX.RX prefix: record it, extend the
     * ModR/M reg and SIB index registers, and decode the next opcode byte.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
1924
1925
1926/**
1927 * @opcode 0x47
1928 * @opflclass incdec
1929 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * In 64-bit mode 0x47 is the REX.RBX prefix: record it, extend the
     * ModR/M reg, r/m (base) and SIB index registers, then decode the next
     * opcode byte.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
1950
1951
1952/**
1953 * @opcode 0x48
1954 * @opflclass incdec
 * @note In 64-bit mode this opcode byte is the REX.W prefix, not 'dec eAX'.
1955 */
1956FNIEMOP_DEF(iemOp_dec_eAX)
1957{
1958 /*
1959 * This is a REX prefix in 64-bit mode.
1960 */
1961 if (IEM_IS_64BIT_CODE(pVCpu))
1962 {
1963 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
1964 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
1965 iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */
1966
 /* Fetch the real opcode byte and restart decoding with it. */
1967 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1968 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1969 }
1970
1971 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
1972 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
1973}
1974
1975
1976/**
1977 * @opcode 0x49
1978 * @opflclass incdec
 * @note In 64-bit mode this opcode byte is the REX.BW prefix, not 'dec eCX'.
1979 */
1980FNIEMOP_DEF(iemOp_dec_eCX)
1981{
1982 /*
1983 * This is a REX prefix in 64-bit mode.
1984 */
1985 if (IEM_IS_64BIT_CODE(pVCpu))
1986 {
1987 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
1988 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
1989 pVCpu->iem.s.uRexB = 1 << 3;
1990 iemRecalEffOpSize(pVCpu);
1991
 /* Fetch the real opcode byte and restart decoding with it. */
1992 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1993 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1994 }
1995
1996 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
1997 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
1998}
1999
2000
2001/**
2002 * @opcode 0x4a
2003 * @opflclass incdec
 * @note In 64-bit mode this opcode byte is the REX.XW prefix, not 'dec eDX'.
2004 */
2005FNIEMOP_DEF(iemOp_dec_eDX)
2006{
2007 /*
2008 * This is a REX prefix in 64-bit mode.
2009 */
2010 if (IEM_IS_64BIT_CODE(pVCpu))
2011 {
2012 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
2013 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2014 pVCpu->iem.s.uRexIndex = 1 << 3;
2015 iemRecalEffOpSize(pVCpu);
2016
 /* Fetch the real opcode byte and restart decoding with it. */
2017 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2018 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2019 }
2020
2021 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
2022 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
2023}
2024
2025
2026/**
2027 * @opcode 0x4b
2028 * @opflclass incdec
 * @note In 64-bit mode this opcode byte is the REX.XBW prefix, not 'dec eBX'.
2029 */
2030FNIEMOP_DEF(iemOp_dec_eBX)
2031{
2032 /*
2033 * This is a REX prefix in 64-bit mode.
2034 */
2035 if (IEM_IS_64BIT_CODE(pVCpu))
2036 {
2037 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
2038 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2039 pVCpu->iem.s.uRexB = 1 << 3;
2040 pVCpu->iem.s.uRexIndex = 1 << 3;
2041 iemRecalEffOpSize(pVCpu);
2042
 /* Fetch the real opcode byte and restart decoding with it. */
2043 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2044 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2045 }
2046
2047 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
2048 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
2049}
2050
2051
2052/**
2053 * @opcode 0x4c
2054 * @opflclass incdec
 * @note In 64-bit mode this opcode byte is the REX.RW prefix, not 'dec eSP'.
2055 */
2056FNIEMOP_DEF(iemOp_dec_eSP)
2057{
2058 /*
2059 * This is a REX prefix in 64-bit mode.
2060 */
2061 if (IEM_IS_64BIT_CODE(pVCpu))
2062 {
2063 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
2064 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
2065 pVCpu->iem.s.uRexReg = 1 << 3;
2066 iemRecalEffOpSize(pVCpu);
2067
 /* Fetch the real opcode byte and restart decoding with it. */
2068 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2069 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2070 }
2071
2072 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
2073 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
2074}
2075
2076
2077/**
2078 * @opcode 0x4d
2079 * @opflclass incdec
 * @note In 64-bit mode this opcode byte is the REX.RBW prefix, not 'dec eBP'.
2080 */
2081FNIEMOP_DEF(iemOp_dec_eBP)
2082{
2083 /*
2084 * This is a REX prefix in 64-bit mode.
2085 */
2086 if (IEM_IS_64BIT_CODE(pVCpu))
2087 {
2088 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
2089 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2090 pVCpu->iem.s.uRexReg = 1 << 3;
2091 pVCpu->iem.s.uRexB = 1 << 3;
2092 iemRecalEffOpSize(pVCpu);
2093
 /* Fetch the real opcode byte and restart decoding with it. */
2094 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2095 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2096 }
2097
2098 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
2099 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
2100}
2101
2102
2103/**
2104 * @opcode 0x4e
2105 * @opflclass incdec
 * @note In 64-bit mode this opcode byte is the REX.RXW prefix, not 'dec eSI'.
2106 */
2107FNIEMOP_DEF(iemOp_dec_eSI)
2108{
2109 /*
2110 * This is a REX prefix in 64-bit mode.
2111 */
2112 if (IEM_IS_64BIT_CODE(pVCpu))
2113 {
2114 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
2115 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2116 pVCpu->iem.s.uRexReg = 1 << 3;
2117 pVCpu->iem.s.uRexIndex = 1 << 3;
2118 iemRecalEffOpSize(pVCpu);
2119
 /* Fetch the real opcode byte and restart decoding with it. */
2120 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2121 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2122 }
2123
2124 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
2125 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
2126}
2127
2128
2129/**
2130 * @opcode 0x4f
2131 * @opflclass incdec
 * @note In 64-bit mode this opcode byte is the REX.RXBW prefix (all REX bits
 * set), not 'dec eDI'.
2132 */
2133FNIEMOP_DEF(iemOp_dec_eDI)
2134{
2135 /*
2136 * This is a REX prefix in 64-bit mode.
2137 */
2138 if (IEM_IS_64BIT_CODE(pVCpu))
2139 {
2140 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
2141 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2142 pVCpu->iem.s.uRexReg = 1 << 3;
2143 pVCpu->iem.s.uRexB = 1 << 3;
2144 pVCpu->iem.s.uRexIndex = 1 << 3;
2145 iemRecalEffOpSize(pVCpu);
2146
 /* Fetch the real opcode byte and restart decoding with it. */
2147 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2148 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2149 }
2150
2151 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
2152 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
2153}
2154
2155
2156/**
2157 * Common 'push register' helper.
 *
 * @param iReg The general register index (X86_GREG_XXX). In 64-bit mode
 * the REX.B bit is OR'ed in here to reach r8-r15.
 * @note In 64-bit mode the default operand size is forced to 64-bit and the
 * operand-size prefix selects 16-bit; there is no 32-bit push in long
 * mode (hence IEM_MC_F_NOT_64BIT on the 32-bit path).
2158 */
2159FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
2160{
2161 if (IEM_IS_64BIT_CODE(pVCpu))
2162 {
2163 iReg |= pVCpu->iem.s.uRexB;
2164 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2165 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2166 }
2167
2168 switch (pVCpu->iem.s.enmEffOpSize)
2169 {
2170 case IEMMODE_16BIT:
2171 IEM_MC_BEGIN(0, 1, 0, 0);
2172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2173 IEM_MC_LOCAL(uint16_t, u16Value);
2174 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
2175 IEM_MC_PUSH_U16(u16Value);
2176 IEM_MC_ADVANCE_RIP_AND_FINISH();
2177 IEM_MC_END();
2178 break;
2179
2180 case IEMMODE_32BIT:
2181 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2183 IEM_MC_LOCAL(uint32_t, u32Value);
2184 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
2185 IEM_MC_PUSH_U32(u32Value);
2186 IEM_MC_ADVANCE_RIP_AND_FINISH();
2187 IEM_MC_END();
2188 break;
2189
2190 case IEMMODE_64BIT:
2191 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2193 IEM_MC_LOCAL(uint64_t, u64Value);
2194 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
2195 IEM_MC_PUSH_U64(u64Value);
2196 IEM_MC_ADVANCE_RIP_AND_FINISH();
2197 IEM_MC_END();
2198 break;
2199
2200 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2201 }
2202}
2203
2204
2205/**
2206 * @opcode 0x50
 * @note Delegates to iemOpCommonPushGReg; REX.B is applied there in 64-bit mode.
2207 */
2208FNIEMOP_DEF(iemOp_push_eAX)
2209{
2210 IEMOP_MNEMONIC(push_rAX, "push rAX");
2211 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
2212}
2213
2214
2215/**
2216 * @opcode 0x51
2217 */
2218FNIEMOP_DEF(iemOp_push_eCX)
2219{
2220 IEMOP_MNEMONIC(push_rCX, "push rCX");
2221 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
2222}
2223
2224
2225/**
2226 * @opcode 0x52
 * @note Delegates to iemOpCommonPushGReg; REX.B is applied there in 64-bit mode.
2227 */
2228FNIEMOP_DEF(iemOp_push_eDX)
2229{
2230 IEMOP_MNEMONIC(push_rDX, "push rDX");
2231 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
2232}
2233
2234
2235/**
2236 * @opcode 0x53
2237 */
2238FNIEMOP_DEF(iemOp_push_eBX)
2239{
2240 IEMOP_MNEMONIC(push_rBX, "push rBX");
2241 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
2242}
2243
2244
2245/**
2246 * @opcode 0x54
2247 */
2248FNIEMOP_DEF(iemOp_push_eSP)
2249{
2250 IEMOP_MNEMONIC(push_rSP, "push rSP");
2251 if (IEM_GET_TARGET_CPU(pVCpu) != IEMTARGETCPU_8086)
2252 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
2253
2254 /* 8086 works differently wrt to 'push sp' compared to 80186 and later. */
2255 IEM_MC_BEGIN(0, 1, IEM_MC_F_ONLY_8086, 0);
2256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2257 IEM_MC_LOCAL(uint16_t, u16Value);
2258 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
2259 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
2260 IEM_MC_PUSH_U16(u16Value);
2261 IEM_MC_ADVANCE_RIP_AND_FINISH();
2262 IEM_MC_END();
2263}
2264
2265
2266/**
2267 * @opcode 0x55
 * @note Delegates to iemOpCommonPushGReg; REX.B is applied there in 64-bit mode.
2268 */
2269FNIEMOP_DEF(iemOp_push_eBP)
2270{
2271 IEMOP_MNEMONIC(push_rBP, "push rBP");
2272 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
2273}
2274
2275
2276/**
2277 * @opcode 0x56
 * @note Delegates to iemOpCommonPushGReg; REX.B is applied there in 64-bit mode.
2278 */
2279FNIEMOP_DEF(iemOp_push_eSI)
2280{
2281 IEMOP_MNEMONIC(push_rSI, "push rSI");
2282 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
2283}
2284
2285
2286/**
2287 * @opcode 0x57
 * @note Delegates to iemOpCommonPushGReg; REX.B is applied there in 64-bit mode.
2288 */
2289FNIEMOP_DEF(iemOp_push_eDI)
2290{
2291 IEMOP_MNEMONIC(push_rDI, "push rDI");
2292 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
2293}
2294
2295
2296/**
2297 * Common 'pop register' helper.
 *
 * @param iReg The general register index (X86_GREG_XXX). In 64-bit mode
 * the REX.B bit is OR'ed in here to reach r8-r15.
 * @note Mirrors iemOpCommonPushGReg: in 64-bit mode the default operand size
 * is 64-bit, the operand-size prefix selects 16-bit, and there is no
 * 32-bit pop in long mode.
2298 */
2299FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
2300{
2301 if (IEM_IS_64BIT_CODE(pVCpu))
2302 {
2303 iReg |= pVCpu->iem.s.uRexB;
2304 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2305 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2306 }
2307
2308 switch (pVCpu->iem.s.enmEffOpSize)
2309 {
2310 case IEMMODE_16BIT:
2311 IEM_MC_BEGIN(0, 0, 0, 0);
2312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2313 IEM_MC_POP_GREG_U16(iReg);
2314 IEM_MC_ADVANCE_RIP_AND_FINISH();
2315 IEM_MC_END();
2316 break;
2317
2318 case IEMMODE_32BIT:
2319 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2321 IEM_MC_POP_GREG_U32(iReg);
2322 IEM_MC_ADVANCE_RIP_AND_FINISH();
2323 IEM_MC_END();
2324 break;
2325
2326 case IEMMODE_64BIT:
2327 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
2328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2329 IEM_MC_POP_GREG_U64(iReg);
2330 IEM_MC_ADVANCE_RIP_AND_FINISH();
2331 IEM_MC_END();
2332 break;
2333
2334 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2335 }
2336}
2337
2338
2339/**
2340 * @opcode 0x58
 * @note Delegates to iemOpCommonPopGReg; REX.B is applied there in 64-bit mode.
2341 */
2342FNIEMOP_DEF(iemOp_pop_eAX)
2343{
2344 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
2345 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
2346}
2347
2348
2349/**
2350 * @opcode 0x59
 * @note Delegates to iemOpCommonPopGReg; REX.B is applied there in 64-bit mode.
2351 */
2352FNIEMOP_DEF(iemOp_pop_eCX)
2353{
2354 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
2355 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
2356}
2357
2358
2359/**
2360 * @opcode 0x5a
 * @note Delegates to iemOpCommonPopGReg; REX.B is applied there in 64-bit mode.
2361 */
2362FNIEMOP_DEF(iemOp_pop_eDX)
2363{
2364 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
2365 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
2366}
2367
2368
2369/**
2370 * @opcode 0x5b
 * @note Delegates to iemOpCommonPopGReg; REX.B is applied there in 64-bit mode.
2371 */
2372FNIEMOP_DEF(iemOp_pop_eBX)
2373{
2374 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
2375 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
2376}
2377
2378
2379/**
2380 * @opcode 0x5c
 * @note Delegates to iemOpCommonPopGReg; the special 'pop [er]sp' semantics
 * are presumably handled inside the IEM_MC_POP_GREG_* workers -- not
 * visible at this level.
2381 */
2382FNIEMOP_DEF(iemOp_pop_eSP)
2383{
2384 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
2385 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
2386}
2387
2388
2389/**
2390 * @opcode 0x5d
 * @note Delegates to iemOpCommonPopGReg; REX.B is applied there in 64-bit mode.
2391 */
2392FNIEMOP_DEF(iemOp_pop_eBP)
2393{
2394 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
2395 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
2396}
2397
2398
2399/**
2400 * @opcode 0x5e
 * @note Delegates to iemOpCommonPopGReg; REX.B is applied there in 64-bit mode.
2401 */
2402FNIEMOP_DEF(iemOp_pop_eSI)
2403{
2404 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
2405 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
2406}
2407
2408
2409/**
2410 * @opcode 0x5f
 * @note Delegates to iemOpCommonPopGReg; REX.B is applied there in 64-bit mode.
2411 */
2412FNIEMOP_DEF(iemOp_pop_eDI)
2413{
2414 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
2415 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
2416}
2417
2418
2419/**
2420 * @opcode 0x60
 * @note PUSHA: 80186+, invalid in 64-bit mode (IEMOP_HLP_NO_64BIT). Deferred
 * to a C implementation; only xSP is declared as modified since the
 * other registers are just read and pushed.
2421 */
2422FNIEMOP_DEF(iemOp_pusha)
2423{
2424 IEMOP_MNEMONIC(pusha, "pusha");
2425 IEMOP_HLP_MIN_186();
2426 IEMOP_HLP_NO_64BIT();
2427 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2428 IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_16);
2429 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2430 IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_32);
2431}
2432
2433
2434/**
2435 * @opcode 0x61
2436 */
2437FNIEMOP_DEF(iemOp_popa__mvex)
2438{
2439 if (!IEM_IS_64BIT_CODE(pVCpu))
2440 {
2441 IEMOP_MNEMONIC(popa, "popa");
2442 IEMOP_HLP_MIN_186();
2443 IEMOP_HLP_NO_64BIT();
2444 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2445 IEM_MC_DEFER_TO_CIMPL_0_RET(0,
2446 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
2447 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
2448 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
2449 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
2450 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
2451 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
2452 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
2453 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
2454 iemCImpl_popa_16);
2455 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2456 IEM_MC_DEFER_TO_CIMPL_0_RET(0,
2457 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
2458 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
2459 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
2460 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
2461 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
2462 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
2463 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
2464 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
2465 iemCImpl_popa_32);
2466 }
2467 IEMOP_MNEMONIC(mvex, "mvex");
2468 Log(("mvex prefix is not supported!\n"));
2469 IEMOP_RAISE_INVALID_OPCODE_RET();
2470}
2471
2472
2473/**
2474 * @opcode 0x62
2475 * @opmnemonic bound
2476 * @op1 Gv_RO
2477 * @op2 Ma
2478 * @opmincpu 80186
2479 * @ophints harmless x86_invalid_64
2480 * @optest op1=0 op2=0 ->
2481 * @optest op1=1 op2=0 -> value.xcpt=5
2482 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2483 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2484 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2485 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2486 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2487 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2488 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2489 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2490 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2491 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2492 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2493 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2494 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2495 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2496 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2497 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2498 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2499 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2500 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2501 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2502 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2503 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2504 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2505 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2506 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2507 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2508 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2509 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2510 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2511 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2512 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2513 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2514 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2515 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2516 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2517 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2518 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2519 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2520 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2521 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2522 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2523 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2524 */
2525FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
2526{
2527 /* The BOUND instruction is invalid in 64-bit mode. In legacy and
2528 compatibility mode it is invalid with MOD=3.
2529
2530 In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
2531 both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
2532 given as R and X without an exact description, so we assume it builds on
2533 the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
2534 like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
2535 uint8_t bRm;
2536 if (!IEM_IS_64BIT_CODE(pVCpu))
2537 {
2538 IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2539 IEMOP_HLP_MIN_186();
2540 IEM_OPCODE_GET_NEXT_U8(&bRm);
2541 if (IEM_IS_MODRM_MEM_MODE(bRm))
2542 {
2543 /** @todo testcase: check that there are two memory accesses involved. Check
2544 * whether they're both read before the \#BR triggers. */
2545 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2546 {
2547 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
2548 IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
2549 IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
2550 IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
2551 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2552
2553 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2555
 /* Lower bound at [mem], upper bound at [mem+2]. */
2556 IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
2557 IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2558 IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
2559
2560 IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
2561 IEM_MC_END();
2562 }
2563 else /* 32-bit operands */
2564 {
2565 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2566 IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
2567 IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
2568 IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
2569 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2570
2571 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2573
 /* Lower bound at [mem], upper bound at [mem+4]. */
2574 IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
2575 IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2576 IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
2577
2578 IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
2579 IEM_MC_END();
2580 }
2581 }
2582
2583 /*
2584 * @opdone
2585 */
2586 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
2587 {
2588 /* Note that there is no need for the CPU to fetch further bytes
2589 here because MODRM.MOD == 3. */
2590 Log(("evex not supported by the guest CPU!\n"));
2591 IEMOP_RAISE_INVALID_OPCODE_RET();
2592 }
2593 }
2594 else
2595 {
2596 /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
2597 * does modr/m read, whereas AMD probably doesn't... */
2598 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
2599 {
2600 Log(("evex not supported by the guest CPU!\n"));
2601 return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
2602 }
2603 IEM_OPCODE_GET_NEXT_U8(&bRm);
2604 }
2605
 /* EVEX prefix: consume payload bytes P2 and P3; decoding not implemented yet. */
2606 IEMOP_MNEMONIC(evex, "evex");
2607 uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
2608 uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
2609 Log(("evex prefix is not implemented!\n"));
2610 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2611}
2612
2613
2614/**
2615 * @opcode 0x63
2616 * @opflmodify zf
2617 * @note non-64-bit modes.
 * @note ARPL requires protected mode (IEMOP_HLP_NO_REAL_OR_V86_MODE) and
 * 80286 or later; the flag work is done by the iemAImpl_arpl worker.
2618 */
2619FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
2620{
2621 IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
2622 IEMOP_HLP_MIN_286();
2623 IEMOP_HLP_NO_REAL_OR_V86_MODE();
2624 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2625
2626 if (IEM_IS_MODRM_REG_MODE(bRm))
2627 {
2628 /* Register */
2629 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
2630 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
2631 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2632 IEM_MC_ARG(uint16_t, u16Src, 1);
2633 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2634
2635 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
2636 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
2637 IEM_MC_REF_EFLAGS(pEFlags);
2638 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
2639
2640 IEM_MC_ADVANCE_RIP_AND_FINISH();
2641 IEM_MC_END();
2642 }
2643 else
2644 {
 /* Memory: map the destination word read/write, the worker may update it. */
2645 /* Memory */
2646 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
2647 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2648 IEM_MC_ARG(uint16_t, u16Src, 1);
2649 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
2650 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2651 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
2652
2653 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2654 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
2655 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2656 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
2657 IEM_MC_FETCH_EFLAGS(EFlags);
2658 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
2659
2660 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
2661 IEM_MC_COMMIT_EFLAGS(EFlags);
2662 IEM_MC_ADVANCE_RIP_AND_FINISH();
2663 IEM_MC_END();
2664 }
2665}
2666
2667
2668/**
2669 * @opcode 0x63
2670 *
2671 * @note This is a weird one. It works like a regular move instruction if
2672 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2673 * @todo This definitely needs a testcase to verify the odd cases. */
 /* Only the REX.W form (sign-extend Ev/32 into Gv/64) is implemented below;
 the non-REX.W 'plain move' variant asserts as not implemented. */
2674FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
2675{
2676 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */
2677
2678 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
2679 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2680
2681 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2682 {
2683 if (IEM_IS_MODRM_REG_MODE(bRm))
2684 {
2685 /*
2686 * Register to register.
2687 */
2688 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2689 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2690 IEM_MC_LOCAL(uint64_t, u64Value);
2691 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
2692 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2693 IEM_MC_ADVANCE_RIP_AND_FINISH();
2694 IEM_MC_END();
2695 }
2696 else
2697 {
2698 /*
2699 * We're loading a register from memory.
2700 */
2701 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
2702 IEM_MC_LOCAL(uint64_t, u64Value);
2703 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2704 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2705 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2706 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2707 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2708 IEM_MC_ADVANCE_RIP_AND_FINISH();
2709 IEM_MC_END();
2710 }
2711 }
2712 else
2713 AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
2714}
2715
2716
2717/**
2718 * @opcode 0x64
2719 * @opmnemonic segfs
2720 * @opmincpu 80386
2721 * @opgroup og_prefixes
2722 */
2723FNIEMOP_DEF(iemOp_seg_FS)
2724{
2725 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
2726 IEMOP_HLP_MIN_386();
2727
 /* Record the segment override and make FS the effective segment. */
2728 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
2729 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
2730
 /* Fetch the real opcode byte and restart decoding with it. */
2731 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2732 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2733}
2734
2735
2736/**
2737 * @opcode 0x65
2738 * @opmnemonic seggs
2739 * @opmincpu 80386
2740 * @opgroup og_prefixes
2741 */
2742FNIEMOP_DEF(iemOp_seg_GS)
2743{
2744 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
2745 IEMOP_HLP_MIN_386();
2746
 /* Record the segment override and make GS the effective segment. */
2747 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
2748 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
2749
 /* Fetch the real opcode byte and restart decoding with it. */
2750 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2751 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2752}
2753
2754
2755/**
2756 * @opcode 0x66
2757 * @opmnemonic opsize
2758 * @openc prefix
2759 * @opmincpu 80386
2760 * @ophints harmless
2761 * @opgroup og_prefixes
2762 */
2763FNIEMOP_DEF(iemOp_op_size)
2764{
2765 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
2766 IEMOP_HLP_MIN_386();
2767
2768 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
2769 iemRecalEffOpSize(pVCpu);
2770
2771 /* For the 4 entry opcode tables, the operand prefix doesn't count
2772 when REPZ or REPNZ are present. */
2773 if (pVCpu->iem.s.idxPrefix == 0)
2774 pVCpu->iem.s.idxPrefix = 1;
2775
 /* Fetch the real opcode byte and restart decoding with it. */
2776 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2777 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2778}
2779
2780
2781/**
2782 * @opcode 0x67
2783 * @opmnemonic addrsize
2784 * @openc prefix
2785 * @opmincpu 80386
2786 * @ophints harmless
2787 * @opgroup og_prefixes
 * @note Toggles between 16/32-bit addressing; in 64-bit mode it selects
 * 32-bit addressing (there is no 16-bit addressing in long mode).
2788 */
2789FNIEMOP_DEF(iemOp_addr_size)
2790{
2791 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
2792 IEMOP_HLP_MIN_386();
2793
2794 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
2795 switch (pVCpu->iem.s.enmDefAddrMode)
2796 {
2797 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2798 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
2799 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2800 default: AssertFailed();
2801 }
2802
 /* Fetch the real opcode byte and restart decoding with it. */
2803 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2804 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2805}
2806
2807
2808/**
2809 * @opcode 0x68
 * @note Iz immediate: imm16 for 16-bit operand size, imm32 for 32-bit; in
 * 64-bit mode an imm32 sign-extended to 64 bits is pushed (see the
 * IEM_OPCODE_GET_NEXT_S32_SX_U64 fetch below).
2810 */
2811FNIEMOP_DEF(iemOp_push_Iz)
2812{
2813 IEMOP_MNEMONIC(push_Iz, "push Iz");
2814 IEMOP_HLP_MIN_186();
2815 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2816 switch (pVCpu->iem.s.enmEffOpSize)
2817 {
2818 case IEMMODE_16BIT:
2819 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
2820 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2822 IEM_MC_LOCAL_CONST(uint16_t, u16Value, u16Imm);
2823 IEM_MC_PUSH_U16(u16Value);
2824 IEM_MC_ADVANCE_RIP_AND_FINISH();
2825 IEM_MC_END();
2826 break;
2827
2828 case IEMMODE_32BIT:
2829 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2830 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2832 IEM_MC_LOCAL_CONST(uint32_t, u32Value, u32Imm);
2833 IEM_MC_PUSH_U32(u32Value);
2834 IEM_MC_ADVANCE_RIP_AND_FINISH();
2835 IEM_MC_END();
2836 break;
2837
2838 case IEMMODE_64BIT:
2839 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2840 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2842 IEM_MC_LOCAL_CONST(uint64_t, u64Value, u64Imm);
2843 IEM_MC_PUSH_U64(u64Value);
2844 IEM_MC_ADVANCE_RIP_AND_FINISH();
2845 IEM_MC_END();
2846 break;
2847
2848 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2849 }
2850}
2851
2852
2853/**
2854 * @opcode 0x69
2855 * @opflclass multiply
 * @note Three-operand IMUL: Gv = Ev * Iz. The immediate is imm16/imm32
 * matching the operand size; the 64-bit form sign-extends an imm32.
 * The actual multiply and flag updates are done by the selected
 * g_iemAImpl_imul_two_* worker.
2856 */
2857FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
2858{
2859 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
2860 IEMOP_HLP_MIN_186();
2861 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2862 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2863
2864 switch (pVCpu->iem.s.enmEffOpSize)
2865 {
2866 case IEMMODE_16BIT:
2867 {
2868 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
2869 if (IEM_IS_MODRM_REG_MODE(bRm))
2870 {
2871 /* register operand */
2872 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2873 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
2874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2875 IEM_MC_LOCAL(uint16_t, u16Tmp);
2876 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2877 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
2878 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
2879 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2880 IEM_MC_REF_EFLAGS(pEFlags);
2881 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2882 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2883
2884 IEM_MC_ADVANCE_RIP_AND_FINISH();
2885 IEM_MC_END();
2886 }
2887 else
2888 {
2889 /* memory operand */
2890 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
2891 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
 /* The '2' tells the eff. addr. calc that 2 immediate bytes follow the ModRM bytes. */
2892 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2893
2894 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2896
2897 IEM_MC_LOCAL(uint16_t, u16Tmp);
2898 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2899
2900 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
2901 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
2902 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2903 IEM_MC_REF_EFLAGS(pEFlags);
2904 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2905 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2906
2907 IEM_MC_ADVANCE_RIP_AND_FINISH();
2908 IEM_MC_END();
2909 }
2910 break;
2911 }
2912
2913 case IEMMODE_32BIT:
2914 {
2915 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
2916 if (IEM_IS_MODRM_REG_MODE(bRm))
2917 {
2918 /* register operand */
2919 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2920 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
2921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2922 IEM_MC_LOCAL(uint32_t, u32Tmp);
2923 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2924
2925 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
2926 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
2927 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2928 IEM_MC_REF_EFLAGS(pEFlags);
2929 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
2930 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2931
2932 IEM_MC_ADVANCE_RIP_AND_FINISH();
2933 IEM_MC_END();
2934 }
2935 else
2936 {
2937 /* memory operand */
2938 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
2939 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
 /* The '4' tells the eff. addr. calc that 4 immediate bytes follow the ModRM bytes. */
2940 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2941
2942 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2944
2945 IEM_MC_LOCAL(uint32_t, u32Tmp);
2946 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2947
2948 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
2949 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
2950 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2951 IEM_MC_REF_EFLAGS(pEFlags);
2952 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
2953 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2954
2955 IEM_MC_ADVANCE_RIP_AND_FINISH();
2956 IEM_MC_END();
2957 }
2958 break;
2959 }
2960
2961 case IEMMODE_64BIT:
2962 {
2963 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
2964 if (IEM_IS_MODRM_REG_MODE(bRm))
2965 {
2966 /* register operand */
2967 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2968 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
2969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2970 IEM_MC_LOCAL(uint64_t, u64Tmp);
2971 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2972
2973 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
2974 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
2975 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2976 IEM_MC_REF_EFLAGS(pEFlags);
2977 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
2978 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
2979
2980 IEM_MC_ADVANCE_RIP_AND_FINISH();
2981 IEM_MC_END();
2982 }
2983 else
2984 {
2985 /* memory operand */
2986 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
2987 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2988 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2989
2990 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
2991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* parameter count for the threaded function for this block. */
2992
2993 IEM_MC_LOCAL(uint64_t, u64Tmp);
2994 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2995
2996 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
 /* Sign-extend the imm32 to 64 bits at argument-setup time instead. */
2997 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int32_t)u32Imm, 1);
2998 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2999 IEM_MC_REF_EFLAGS(pEFlags);
3000 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3001 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3002
3003 IEM_MC_ADVANCE_RIP_AND_FINISH();
3004 IEM_MC_END();
3005 }
3006 break;
3007 }
3008
3009 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3010 }
3011}
3012
3013
/**
 * @opcode 0x6a
 *
 * PUSH Ib - push a sign-extended byte immediate.
 *
 * The byte immediate is sign-extended to the effective operand size before
 * being pushed.  In 64-bit mode the default operand size is 64 bits
 * (IEMOP_HLP_DEFAULT_64BIT_OP_SIZE), so the 32-bit case is flagged
 * IEM_MC_F_NOT_64BIT.
 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();    /* PUSH Ib requires an 80186 or later. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_186, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint16_t, uValue, (int16_t)i8Imm); /* sign-extend Ib -> 16 bits */
            IEM_MC_PUSH_U16(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint32_t, uValue, (int32_t)i8Imm); /* sign-extend Ib -> 32 bits */
            IEM_MC_PUSH_U32(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint64_t, uValue, (int64_t)i8Imm); /* sign-extend Ib -> 64 bits */
            IEM_MC_PUSH_U64(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3053
3054
/**
 * @opcode 0x6b
 * @opflclass multiply
 *
 * IMUL Gv,Ev,Ib - three operand signed multiply with a sign-extended byte
 * immediate: Gv = Ev * (sign-extended Ib).
 *
 * Ev is fetched into a temporary which the two-operand worker multiplies in
 * place; the temporary is then stored to the ModR/M 'reg' register.
 * SF/ZF/AF/PF are left undefined by hardware, hence the
 * IEMOP_VERIFICATION_UNDEFINED_EFLAGS call below.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            /* Worker variant matching the target CPU's EFLAGS behavior. */
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend Ib -> 16 bits */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); /* write result to Gv */

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);

                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to be fetched */

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend Ib -> 32 bits */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1); /* sign-extend Ib -> 64 bits */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);

                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the threaded parameter count. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1); /* sign-extension done here instead */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3217
3218
/**
 * @opcode 0x6c
 * @opfltest iopl,df
 *
 * INS/INSB - input byte(s) from port DX into Yb.
 *
 * Everything is deferred to C implementations, selected by address size and
 * whether a REP/REPNE prefix is present (both repeat prefixes are treated
 * identically here).  The register mask passed to the defer macro lists the
 * guest GPRs the C code may modify: xDI always, plus xCX for the REP forms.
 * NOTE(review): the trailing 'false' argument is presumably an
 * fIoChecked=false flag (I/O permission not yet verified) - confirm against
 * the iemCImpl_*ins* implementations.
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        /* Each case returns via the defer macro, so no break statements. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3271
3272
/**
 * @opcode 0x6d
 * @opfltest iopl,df
 *
 * INS/INSW/INSD - input word/dword(s) from port DX into Yv.
 *
 * Dispatches on REP prefix, effective operand size and effective address
 * size.  Note that a 64-bit operand size is handled by the same op32
 * workers as 32-bit (the 64-bit case label falls into the 32-bit one).
 * The register mask lists the guest GPRs the C code may modify: xDI
 * always, plus xCX for the REP forms.
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                /* Each inner case returns via the defer macro. */
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3378
3379
/**
 * @opcode 0x6e
 * @opfltest iopl,df
 *
 * OUTS/OUTSB - output byte(s) from Yb to port DX.
 *
 * Deferred to C implementations, selected by address size and REP prefix.
 * Unlike INS, the source segment may be overridden, so iEffSeg is passed
 * along as the extra parameter.  The register mask lists the guest GPRs the
 * C code may modify: xSI always, plus xCX for the REP forms.
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        /* Each case returns via the defer macro, so no break statements. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3432
3433
/**
 * @opcode 0x6f
 * @opfltest iopl,df
 *
 * OUTS/OUTSW/OUTSD - output word/dword(s) from Yv to port DX.
 *
 * Dispatches on REP prefix, effective operand size and effective address
 * size; a 64-bit operand size shares the op32 workers (case fall-into).
 * iEffSeg is forwarded because the source segment of OUTS can be
 * overridden.  Register mask: xSI always, plus xCX for the REP forms.
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                /* Each inner case returns via the defer macro. */
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3539
3540
/**
 * @opcode 0x70
 * @opfltest of
 *
 * JO rel8 - jump short if overflow (OF=1).
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken: rel8 displacement */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3560
3561
/**
 * @opcode 0x71
 * @opfltest of
 *
 * JNO rel8 - jump short if not overflow (OF=0).  Note the inverted
 * structure: the flag test selects the fall-through path and the ELSE
 * branch takes the jump.
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* OF=1: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* OF=0: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3581
/**
 * @opcode 0x72
 * @opfltest cf
 *
 * JC/JB/JNAE rel8 - jump short if carry (CF=1).
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* CF=1: taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* CF=0: not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3601
3602
/**
 * @opcode 0x73
 * @opfltest cf
 *
 * JNC/JNB/JAE rel8 - jump short if not carry (CF=0).  Inverted structure:
 * the ELSE branch takes the jump.
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* CF=1: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* CF=0: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3622
3623
/**
 * @opcode 0x74
 * @opfltest zf
 *
 * JE/JZ rel8 - jump short if equal/zero (ZF=1).
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* ZF=1: taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* ZF=0: not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3643
3644
/**
 * @opcode 0x75
 * @opfltest zf
 *
 * JNE/JNZ rel8 - jump short if not equal/not zero (ZF=0).  Inverted
 * structure: the ELSE branch takes the jump.
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* ZF=1: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* ZF=0: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3664
3665
/**
 * @opcode 0x76
 * @opfltest cf,zf
 *
 * JBE/JNA rel8 - jump short if below or equal (CF=1 or ZF=1).
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* CF or ZF set: taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* neither set: not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3685
3686
/**
 * @opcode 0x77
 * @opfltest cf,zf
 *
 * JA/JNBE rel8 - jump short if above (CF=0 and ZF=0).  Inverted structure:
 * the ELSE branch takes the jump.
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* CF or ZF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* both clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3706
3707
/**
 * @opcode 0x78
 * @opfltest sf
 *
 * JS rel8 - jump short if sign (SF=1).
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* SF=1: taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* SF=0: not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3727
3728
/**
 * @opcode 0x79
 * @opfltest sf
 *
 * JNS rel8 - jump short if not sign (SF=0).  Inverted structure: the ELSE
 * branch takes the jump.
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* SF=1: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* SF=0: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3748
3749
/**
 * @opcode 0x7a
 * @opfltest pf
 *
 * JP/JPE rel8 - jump short if parity even (PF=1).
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* PF=1: taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* PF=0: not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3769
3770
/**
 * @opcode 0x7b
 * @opfltest pf
 *
 * JNP/JPO rel8 - jump short if parity odd (PF=0).  Inverted structure: the
 * ELSE branch takes the jump.
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* PF=1: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* PF=0: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3790
3791
/**
 * @opcode 0x7c
 * @opfltest sf,of
 *
 * JL/JNGE rel8 - jump short if less (SF != OF).
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* SF != OF: taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* SF == OF: not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3811
3812
/**
 * @opcode 0x7d
 * @opfltest sf,of
 *
 * JGE/JNL rel8 - jump short if greater or equal (SF == OF).  Inverted
 * structure: the ELSE branch takes the jump.
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* SF != OF: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* SF == OF: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3832
3833
/**
 * @opcode 0x7e
 * @opfltest zf,sf,of
 *
 * JLE/JNG rel8 - jump short if less or equal (ZF=1 or SF != OF).
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* ZF=1 or SF != OF: taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* otherwise: not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3853
3854
/**
 * @opcode 0x7f
 * @opfltest zf,sf,of
 *
 * JG/JNLE rel8 - jump short if greater (ZF=0 and SF == OF).  Inverted
 * structure: the ELSE branch takes the jump.
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();     /* ZF=1 or SF != OF: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* ZF=0 and SF == OF: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3874
3875
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Eb_Ib_80.
 *
 * Emits the register-target path and the non-locked memory-target path.
 * Deliberately ends inside an open 'else { ... {' so that it must be paired
 * with IEMOP_BODY_BINARY_Eb_Ib_LOCKED (or _NO_LOCK for the RO variant)
 * which supplies the locked memory path and closes the braces.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0

/**
 * Locked memory-target tail for IEMOP_BODY_BINARY_Eb_Ib_RW.
 *
 * Emits the LOCK-prefixed path using an atomic mapping/commit and the
 * locked worker, then closes the braces left open by the _RW macro.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
3950
/* Read-only variant for operators that only read the destination (CMP):
   register form references the GREG directly, memory form maps the byte RO
   and commits nothing but EFLAGS.  Ends with an open 'else' so a following
   IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK supplies the LOCK-prefix rejection path. */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
3997
/* Completes the open 'else' branch of IEMOP_BODY_BINARY_Eb_Ib_RO: a LOCK
   prefix on a read-only operator is invalid, so raise the invalid-lock fault. */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4004
4005
4006
4007/**
4008 * @opmaps grp1_80,grp1_83
4009 * @opcode /0
4010 * @opflclass arithmetic
4011 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
    /* The two body macros form one if/else: _RW emits the register and
       non-LOCK memory forms, _LOCKED the LOCK-prefixed (atomic) memory form. */
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_add_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
}
4018
4019
4020/**
4021 * @opmaps grp1_80,grp1_83
4022 * @opcode /1
4023 * @opflclass logical
4024 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
    /* _RW + _LOCKED together emit register, plain-memory and LOCKed forms. */
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_or_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
}
4031
4032
4033/**
4034 * @opmaps grp1_80,grp1_83
4035 * @opcode /2
4036 * @opflclass arithmetic_carry
4037 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
    /* _RW + _LOCKED together emit register, plain-memory and LOCKed forms. */
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
}
4044
4045
4046/**
4047 * @opmaps grp1_80,grp1_83
4048 * @opcode /3
4049 * @opflclass arithmetic_carry
4050 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
    /* _RW + _LOCKED together emit register, plain-memory and LOCKed forms. */
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
}
4057
4058
4059/**
4060 * @opmaps grp1_80,grp1_83
4061 * @opcode /4
4062 * @opflclass logical
4063 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
    /* _RW + _LOCKED together emit register, plain-memory and LOCKed forms. */
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_and_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
}
4070
4071
4072/**
4073 * @opmaps grp1_80,grp1_83
4074 * @opcode /5
4075 * @opflclass arithmetic
4076 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
    /* _RW + _LOCKED together emit register, plain-memory and LOCKed forms. */
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
}
4083
4084
4085/**
4086 * @opmaps grp1_80,grp1_83
4087 * @opcode /6
4088 * @opflclass logical
4089 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
    /* _RW + _LOCKED together emit register, plain-memory and LOCKed forms. */
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
}
4096
4097
4098/**
4099 * @opmaps grp1_80,grp1_83
4100 * @opcode /7
4101 * @opflclass arithmetic
4102 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
    /* CMP only reads the destination, so the read-only body is used and the
       _NO_LOCK tail raises the invalid-lock-prefix fault for LOCK cmp. */
    IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
}
4109
4110
4111/**
4112 * @opcode 0x80
4113 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    /* Group 1, opcode 0x80: the reg field of the ModR/M byte selects which
       of the eight binary operators to apply to Eb,Ib. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4130
4131
4132/**
4133 * Body for a group 1 binary operator.
4134 */
4135#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4136 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4137 { \
4138 /* register target */ \
4139 switch (pVCpu->iem.s.enmEffOpSize) \
4140 { \
4141 case IEMMODE_16BIT: \
4142 { \
4143 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4144 IEM_MC_BEGIN(3, 0, 0, 0); \
4145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4146 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4147 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4148 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4149 \
4150 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4151 IEM_MC_REF_EFLAGS(pEFlags); \
4152 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4153 \
4154 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4155 IEM_MC_END(); \
4156 break; \
4157 } \
4158 \
4159 case IEMMODE_32BIT: \
4160 { \
4161 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4162 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4164 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4165 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4166 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4167 \
4168 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4169 IEM_MC_REF_EFLAGS(pEFlags); \
4170 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4171 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4172 \
4173 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4174 IEM_MC_END(); \
4175 break; \
4176 } \
4177 \
4178 case IEMMODE_64BIT: \
4179 { \
4180 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4181 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4183 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4184 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4185 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4186 \
4187 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4188 IEM_MC_REF_EFLAGS(pEFlags); \
4189 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4190 \
4191 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4192 IEM_MC_END(); \
4193 break; \
4194 } \
4195 \
4196 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4197 } \
4198 } \
4199 else \
4200 { \
4201 /* memory target */ \
4202 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4203 { \
4204 switch (pVCpu->iem.s.enmEffOpSize) \
4205 { \
4206 case IEMMODE_16BIT: \
4207 { \
4208 IEM_MC_BEGIN(3, 3, 0, 0); \
4209 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4210 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4211 \
4212 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4213 IEMOP_HLP_DONE_DECODING(); \
4214 \
4215 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4216 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4217 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4218 \
4219 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4220 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4221 IEM_MC_FETCH_EFLAGS(EFlags); \
4222 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4223 \
4224 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4225 IEM_MC_COMMIT_EFLAGS(EFlags); \
4226 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4227 IEM_MC_END(); \
4228 break; \
4229 } \
4230 \
4231 case IEMMODE_32BIT: \
4232 { \
4233 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4234 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4236 \
4237 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4238 IEMOP_HLP_DONE_DECODING(); \
4239 \
4240 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4241 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4242 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4243 \
4244 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4245 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4246 IEM_MC_FETCH_EFLAGS(EFlags); \
4247 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4248 \
4249 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4250 IEM_MC_COMMIT_EFLAGS(EFlags); \
4251 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4252 IEM_MC_END(); \
4253 break; \
4254 } \
4255 \
4256 case IEMMODE_64BIT: \
4257 { \
4258 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4259 \
4260 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4261 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4262 \
4263 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4264 IEMOP_HLP_DONE_DECODING(); \
4265 \
4266 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4267 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4268 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4269 \
4270 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4271 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4272 IEM_MC_FETCH_EFLAGS(EFlags); \
4273 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4274 \
4275 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4276 IEM_MC_COMMIT_EFLAGS(EFlags); \
4277 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4278 IEM_MC_END(); \
4279 break; \
4280 } \
4281 \
4282 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4283 } \
4284 } \
4285 else \
4286 { \
4287 (void)0
/* This must be a separate macro due to parsing restrictions in IEMAllInstPython.py.
   Completes the LOCK-prefixed 'else' branch opened by IEMOP_BODY_BINARY_Ev_Iz_RW:
   same per-size layout, but the destination is mapped ATOMIC and the locked
   workers are invoked. */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4372
/* read-only version: for operators that only read the destination (CMP);
   memory is mapped RO, only EFLAGS is committed, and a LOCK prefix raises
   the invalid-lock-prefix fault in the final 'else'.  Self-contained - no
   _LOCKED companion macro follows. */
#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4529
4530
4531/**
4532 * @opmaps grp1_81
4533 * @opcode /0
4534 * @opflclass arithmetic
4535 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
    /* _RW + _LOCKED form one if/else covering register, plain-memory and
       LOCK-prefixed (atomic) memory forms for all operand sizes. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
4542
4543
4544/**
4545 * @opmaps grp1_81
4546 * @opcode /1
4547 * @opflclass logical
4548 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
    /* _RW + _LOCKED together emit register, plain-memory and LOCKed forms. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
4555
4556
4557/**
4558 * @opmaps grp1_81
4559 * @opcode /2
4560 * @opflclass arithmetic_carry
4561 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
    /* _RW + _LOCKED together emit register, plain-memory and LOCKed forms. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
4568
4569
4570/**
4571 * @opmaps grp1_81
4572 * @opcode /3
4573 * @opflclass arithmetic_carry
4574 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
    /* _RW + _LOCKED together emit register, plain-memory and LOCKed forms. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
4581
4582
4583/**
4584 * @opmaps grp1_81
4585 * @opcode /4
4586 * @opflclass logical
4587 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
    /* _RW + _LOCKED together emit register, plain-memory and LOCKed forms. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
4594
4595
4596/**
4597 * @opmaps grp1_81
4598 * @opcode /5
4599 * @opflclass arithmetic
4600 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
    /* _RW + _LOCKED together emit register, plain-memory and LOCKed forms. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
4607
4608
4609/**
4610 * @opmaps grp1_81
4611 * @opcode /6
4612 * @opflclass logical
4613 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
    /* _RW + _LOCKED together emit register, plain-memory and LOCKed forms. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
4620
4621
4622/**
4623 * @opmaps grp1_81
4624 * @opcode /7
4625 * @opflclass arithmetic
4626 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
    /* CMP only reads the destination; the _RO body is self-contained and
       already rejects the LOCK prefix, so no _LOCKED companion is needed. */
    IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
4632
4633
4634/**
4635 * @opcode 0x81
4636 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    /* Group 1, opcode 0x81: the reg field of the ModR/M byte selects which
       of the eight binary operators to apply to Ev,Iz. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4653
4654
4655/**
4656 * @opcode 0x82
4657 * @opmnemonic grp1_82
4658 * @opgroup og_groups
4659 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    /* Opcode 0x82 aliases 0x80 (group 1 Eb,Ib) but is invalid in 64-bit mode. */
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
4665
4666
4667/**
4668 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4669 * iemOp_Grp1_Ev_Ib.
4670 */
4671#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4672 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4673 { \
4674 /* \
4675 * Register target \
4676 */ \
4677 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4678 switch (pVCpu->iem.s.enmEffOpSize) \
4679 { \
4680 case IEMMODE_16BIT: \
4681 IEM_MC_BEGIN(3, 0, 0, 0); \
4682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4683 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4684 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
4685 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4686 \
4687 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4688 IEM_MC_REF_EFLAGS(pEFlags); \
4689 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4690 \
4691 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4692 IEM_MC_END(); \
4693 break; \
4694 \
4695 case IEMMODE_32BIT: \
4696 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4698 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4699 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
4700 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4701 \
4702 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4703 IEM_MC_REF_EFLAGS(pEFlags); \
4704 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4705 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4706 \
4707 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4708 IEM_MC_END(); \
4709 break; \
4710 \
4711 case IEMMODE_64BIT: \
4712 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4714 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4715 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
4716 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4717 \
4718 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4719 IEM_MC_REF_EFLAGS(pEFlags); \
4720 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4721 \
4722 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4723 IEM_MC_END(); \
4724 break; \
4725 \
4726 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4727 } \
4728 } \
4729 else \
4730 { \
4731 /* \
4732 * Memory target. \
4733 */ \
4734 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4735 { \
4736 switch (pVCpu->iem.s.enmEffOpSize) \
4737 { \
4738 case IEMMODE_16BIT: \
4739 IEM_MC_BEGIN(3, 3, 0, 0); \
4740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4742 \
4743 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4744 IEMOP_HLP_DONE_DECODING(); \
4745 \
4746 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4747 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4748 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4749 \
4750 IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
4751 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4752 IEM_MC_FETCH_EFLAGS(EFlags); \
4753 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4754 \
4755 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4756 IEM_MC_COMMIT_EFLAGS(EFlags); \
4757 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4758 IEM_MC_END(); \
4759 break; \
4760 \
4761 case IEMMODE_32BIT: \
4762 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4764 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4765 \
4766 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4767 IEMOP_HLP_DONE_DECODING(); \
4768 \
4769 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4770 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4771 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4772 \
4773 IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
4774 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4775 IEM_MC_FETCH_EFLAGS(EFlags); \
4776 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4777 \
4778 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4779 IEM_MC_COMMIT_EFLAGS(EFlags); \
4780 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4781 IEM_MC_END(); \
4782 break; \
4783 \
4784 case IEMMODE_64BIT: \
4785 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4786 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4788 \
4789 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4790 IEMOP_HLP_DONE_DECODING(); \
4791 \
4792 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4793 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4794 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4795 \
4796 IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
4797 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4798 IEM_MC_FETCH_EFLAGS(EFlags); \
4799 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4800 \
4801 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4802 IEM_MC_COMMIT_EFLAGS(EFlags); \
4803 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4804 IEM_MC_END(); \
4805 break; \
4806 \
4807 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4808 } \
4809 } \
4810 else \
4811 { \
4812 (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py.
   Completes the LOCK-prefix 'else' branch of IEMOP_BODY_BINARY_Ev_Ib_RW:
   atomic mapping of the destination and the locked workers. */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4891
/* read-only variant */
/**
 * Decoder body for the read-only forms of the 0x83 group-1 instructions
 * (i.e. CMP Ev,Ib in this file): the destination operand is only read, so
 * the memory mapping is RO and nothing is written back except EFLAGS.
 *
 * The imm8 is sign-extended to the effective operand size (see the
 * (uintN_t)(intN_t)(int8_t) casts below) before being handed to the
 * arithmetic assembly worker.  A LOCK prefix is invalid for these forms
 * and raises \#UD (unless lock disregarding is configured).
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         * Note: the effective address is calculated before the imm8 is \
         *       fetched, matching the instruction byte layout. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* LOCK prefix on a read-only operation is invalid. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
5038
/**
 * @opmaps grp1_83
 * @opcode /0
 * @opflclass arithmetic
 *
 * ADD Ev,Ib - the imm8 is sign-extended to the effective operand size by
 * the body macros.  The RW body emits the register and non-locked memory
 * paths; the LOCKED body covers the LOCK-prefixed memory path.
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
5050
5051
/**
 * @opmaps grp1_83
 * @opcode /1
 * @opflclass logical
 *
 * OR Ev,Ib with sign-extended imm8; normal and locked worker variants.
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
5063
5064
/**
 * @opmaps grp1_83
 * @opcode /2
 * @opflclass arithmetic_carry
 *
 * ADC Ev,Ib with sign-extended imm8; normal and locked worker variants.
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
5076
5077
/**
 * @opmaps grp1_83
 * @opcode /3
 * @opflclass arithmetic_carry
 *
 * SBB Ev,Ib with sign-extended imm8; normal and locked worker variants.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
5089
5090
/**
 * @opmaps grp1_83
 * @opcode /4
 * @opflclass logical
 *
 * AND Ev,Ib with sign-extended imm8; normal and locked worker variants.
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
5102
5103
/**
 * @opmaps grp1_83
 * @opcode /5
 * @opflclass arithmetic
 *
 * SUB Ev,Ib with sign-extended imm8; normal and locked worker variants.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
5115
5116
/**
 * @opmaps grp1_83
 * @opcode /6
 * @opflclass logical
 *
 * XOR Ev,Ib with sign-extended imm8; normal and locked worker variants.
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
5128
5129
/**
 * @opmaps grp1_83
 * @opcode /7
 * @opflclass arithmetic
 *
 * CMP Ev,Ib with sign-extended imm8.  Read-only body: no destination
 * write-back and no locked variant (LOCK raises \#UD in the body macro).
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
5140
5141
/**
 * @opcode 0x83
 *
 * Group-1 dispatcher: routes on the ModR/M reg field (/0../7) to the
 * individual Ev,Ib workers above.  The ModR/M byte is fetched here and
 * passed down so each worker decodes its own displacement/imm8.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
             to the 386 even if absent in the intel reference manuals and some
             3rd party opcode listings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5164
5165
/**
 * @opcode 0x84
 * @opflclass logical
 *
 * TEST Eb,Gb - byte AND that only updates EFLAGS (read-only body).
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST. */
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8);
}
5176
5177
/**
 * @opcode 0x85
 * @opflclass logical
 *
 * TEST Ev,Gv - word/dword/qword AND that only updates EFLAGS (read-only body).
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST. */
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64);
}
5188
5189
/**
 * @opcode 0x86
 *
 * XCHG Eb,Gb.  For the memory form the exchange is performed atomically
 * regardless of an explicit LOCK prefix (only IEM_F_X86_DISREGARD_LOCK
 * downgrades it to the unlocked worker), matching the implicit-LOCK
 * semantics of XCHG with a memory operand.
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        /* Plain register swap via two temporaries. */
        IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * The worker swaps *pu8Mem and *pu8Reg; the register half is then
         * committed after the memory mapping is unmapped.
         */
#define IEMOP_XCHG_BYTE(a_fnWorker, a_Style) \
            IEM_MC_BEGIN(2, 4, 0, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            IEM_MC_LOCAL(uint8_t, uTmpReg); \
            IEM_MC_ARG(uint8_t *, pu8Mem, 0); \
            IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
            IEM_MC_MEM_MAP_U8_##a_Style(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker, pu8Mem, pu8Reg); \
            IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Style(bUnmapInfo); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END()

        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_locked,ATOMIC);
        }
        else
        {
            IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_unlocked,RW);
        }
    }
}
5250
5251
/**
 * @opcode 0x87
 *
 * XCHG Ev,Gv.  Like the byte form, the memory variant is atomic unless
 * IEM_F_X86_DISREGARD_LOCK is set (XCHG with memory has implicit LOCK
 * semantics, prefix or not).
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                /* Note: the U32 stores zero the high halves of the 64-bit regs. */
                IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * One expansion per effective operand size; a_Type selects the
         * atomic vs plain read-write mapping/unmapping primitives.
         */
#define IEMOP_XCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
        do { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 4, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_LOCAL(uint16_t, uTmpReg); \
                    IEM_MC_ARG(uint16_t *, pu16Mem, 0); \
                    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
                    IEM_MC_MEM_MAP_U16_##a_Type(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker16, pu16Mem, pu16Reg); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
                    \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 4, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_LOCAL(uint32_t, uTmpReg); \
                    IEM_MC_ARG(uint32_t *, pu32Mem, 0); \
                    IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_##a_Type(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker32, pu32Mem, pu32Reg); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
                    \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 4, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_LOCAL(uint64_t, uTmpReg); \
                    IEM_MC_ARG(uint64_t *, pu64Mem, 0); \
                    IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_##a_Type(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker64, pu64Mem, pu64Reg); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
                    \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } while (0)
        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_locked, iemAImpl_xchg_u32_locked, iemAImpl_xchg_u64_locked,ATOMIC);
        }
        else
        {
            IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_unlocked, iemAImpl_xchg_u32_unlocked, iemAImpl_xchg_u64_unlocked,RW);
        }
    }
}
5397
5398
/**
 * @opcode 0x88
 *
 * MOV Eb,Gb - store a byte register to r/m8 (register or memory).
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5438
5439
/**
 * @opcode 0x89
 *
 * MOV Ev,Gv - store a general register to r/m16/32/64, one expansion per
 * effective operand size for both the register and memory destinations.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5536
5537
/**
 * @opcode 0x8a
 *
 * MOV Gb,Eb - load a byte register from r/m8 (register or memory).
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5576
5577
/**
 * @opcode 0x8b
 *
 * MOV Gv,Ev - load a general register from r/m16/32/64, one expansion per
 * effective operand size for both the register and memory sources.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5674
5675
5676/**
5677 * opcode 0x63
5678 * @todo Table fixme
5679 */
5680FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
5681{
5682 if (!IEM_IS_64BIT_CODE(pVCpu))
5683 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
5684 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
5685 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
5686 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
5687}
5688
5689
/**
 * @opcode 0x8c
 *
 * MOV Ev,Sw - store a segment selector.  The register form honours the
 * operand size (zero-extending for 32/64-bit); the memory form is always
 * a word-sized store.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if (iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg); /* zero-extend selector */
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg); /* zero-extend selector */
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5768
5769
5770
5771
/**
 * @opcode 0x8d
 *
 * LEA Gv,M - store the effective address (no memory access).  The register
 * form is invalid (\#UD).  For 16/32-bit operand sizes the calculated
 * address is truncated to the operand size before being stored.
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /** @todo optimize: This value casting/masking can be skipped if addr-size ==
             *        operand-size, which is usually the case. It'll save an instruction
             *        and a register. */
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /** @todo optimize: This value casting/masking can be skipped if addr-size ==
             *        operand-size, which is usually the case. It'll save an instruction
             *        and a register. */
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc); /* no truncation needed */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5827
5828
/**
 * @opcode 0x8e
 *
 * MOV Sw,Ev - load a segment register from r/m16.  Loading CS is invalid
 * (\#UD); loading SS inhibits interrupts for one instruction (shadow flag).
 * The actual descriptor load is done by the iemCImpl_load_SReg C worker,
 * with the modified-register mask passed so the recompiler knows what got
 * clobbered.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    /** @todo r=bird: What does 8086 do here wrt CS? */
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     *
     * Note! Using IEMOP_MOV_SW_EV_REG_BODY here to specify different
     *       IEM_CIMPL_F_XXX values depending on the CPU mode and target
     *       register.  This is a restriction of the current recompiler
     *       approach.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
#define IEMOP_MOV_SW_EV_REG_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 0, 0, a_fCImplFlags); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t, u16Value, 1); \
            IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                RT_BIT_64(kIemNativeGstReg_SegSelFirst    + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
                                iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        if (iSegReg == X86_SREG_SS)
        {
            /* SS load inhibits interrupts; in 32-bit code it can also switch mode. */
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_REG_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_REG_BODY
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#define IEMOP_MOV_SW_EV_MEM_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 1, 0, a_fCImplFlags); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t, u16Value, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                RT_BIT_64(kIemNativeGstReg_SegSelFirst    + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
                                iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        if (iSegReg == X86_SREG_SS)
        {
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_MEM_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_MEM_BODY
    }
}
5944
5945
/** Opcode 0x8f /0.
 *
 * 'pop Ev' - pops a word/dword/qword off the stack into a GPR or memory
 * operand.  The register form shares code with the plain pop-GPR opcodes;
 * the memory form is deferred to C because RSP must be updated *before*
 * the effective address is calculated (see the big comment below). */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations. Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler. It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev? Ignorning it for
     * now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations. This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice. This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            /* The third argument's high byte (2/4/8 << 8) biases rSP during the
               effective address calc, implementing the pre-incremented-RSP rule. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR  GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Only commit RSP once the store has succeeded. */
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
6077
6078
/**
 * @opcode 0x8f
 *
 * Dispatches between 'pop Ev' (modrm.reg == 0) and the AMD XOP prefix
 * (modrm.reg != 0).  For XOP, decodes the second prefix byte and stashes the
 * (inverted) R/X/B/vvvv, L and pp fields before selecting the opcode map.
 */
FNIEMOP_DEF(iemOp_Grp1A__xop)
{
    /*
     * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
     * three byte VEX prefix, except that the mmmmm field cannot have the values
     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    IEMOP_MNEMONIC(xop, "xop");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
    {
        /** @todo Test when exctly the XOP conformance checks kick in during
         * instruction decoding and fetching (using \#PF). */
        uint8_t bXop2;   IEM_OPCODE_GET_NEXT_U8(&bXop2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* Like VEX, XOP is invalid when combined with 66/F2/F3/lock/REX. */
        if (   (  pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
            if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* The R/X/B/vvvv fields are stored inverted in the prefix bytes,
               hence the ~ before extracting each into bit 3 (or bits 0-3). */
            /* NOTE(review): unlike the VEX2/VEX3 decoders (r103513) this does
             * not set IEM_OP_PRF_REX_R/X/B in fPrefixes - verify whether
             * IEM_GET_MODRM_EX needs those for XOP instructions too. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bXop2 & 0x3;

            /** @todo XOP: Just use new tables and decoders. */
            switch (bRm & 0x1f) /* mmmmm - opcode map selector */
            {
                case 8: /* xop opcode map 8. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 9: /* xop opcode map 9. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 10: /* xop opcode map 10. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        else
            Log(("XOP: Invalid prefix mix!\n"));
    }
    else
        Log(("XOP: XOP support disabled!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
6141
6142
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Shared by opcodes 0x90-0x97 (and REX.B'd 0x90).  Applies REX.B to iReg,
 * then swaps the register with rAX at the effective operand size via two
 * temporaries.  Note: no LOCK semantics here - the implicit locking of XCHG
 * only applies to the memory form, which this helper never handles.
 *
 * @param   iReg    The low three bits of the register to exchange with rAX
 *                  (REX.B is OR'ed in below).
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    iReg |= pVCpu->iem.s.uRexB; /* extend to r8-r15 when REX.B is set */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg,         u16Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg,         u32Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg,         u64Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6193
6194
/**
 * @opcode 0x90
 *
 * NOP, but with three special cases:
 *  - with REX.B it is 'xchg r8,rAX' (a real exchange),
 *  - with an F3 prefix it is PAUSE (which may VM-exit under nested VMX/SVM),
 *  - otherwise a plain no-op.
 */
FNIEMOP_DEF(iemOp_nop)
{
    /* R8/R8D and RAX/EAX can be exchanged. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
    {
        IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
        return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        IEMOP_MNEMONIC(pause, "pause");
        /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
           and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
        if (!IEM_IS_IN_GUEST(pVCpu))
        { /* probable */ }
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
        else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmx_pause);
#endif
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
        else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_svm_pause);
#endif
    }
    else
        IEMOP_MNEMONIC(nop, "nop");
    /** @todo testcase: lock nop; lock pause */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6231
6232
/**
 * @opcode 0x91
 * 'xchg rCX,rAX' - delegates to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
6241
6242
/**
 * @opcode 0x92
 * 'xchg rDX,rAX' - delegates to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
6251
6252
/**
 * @opcode 0x93
 * 'xchg rBX,rAX' - delegates to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
6261
6262
6263/**
6264 * @opcode 0x94
6265 */
6266FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6267{
6268 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6269 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6270}
6271
6272
/**
 * @opcode 0x95
 * 'xchg rBP,rAX' - delegates to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
6281
6282
/**
 * @opcode 0x96
 * 'xchg rSI,rAX' - delegates to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
6291
6292
/**
 * @opcode 0x97
 * 'xchg rDI,rAX' - delegates to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
6301
6302
/**
 * @opcode 0x98
 *
 * CBW/CWDE/CDQE - sign-extend the lower half of rAX in place, selected by
 * operand size.  Implemented by testing the top bit of the source half and
 * either OR'ing in all-ones or AND'ing down to the low half.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: /* cbw: AL -> AX */
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT: /* cwde: AX -> EAX */
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT: /* cdqe: EAX -> RAX */
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6352
6353
/**
 * @opcode 0x99
 *
 * CWD/CDQ/CQO - sign-extend rAX into rDX:rAX, selected by operand size.
 * rDX is set to all-ones or zero depending on the sign bit of rAX.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: /* cwd: AX -> DX:AX */
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT: /* cdq: EAX -> EDX:EAX */
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT: /* cqo: RAX -> RDX:RAX */
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6403
6404
/**
 * @opcode 0x9a
 *
 * 'call Ap' - far call with the selector:offset given directly in the
 * instruction stream.  Invalid in 64-bit mode.  The heavy lifting (stack
 * push, mode/privilege handling) is deferred to iemCImpl_callf.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg); /* 16-bit offset, zero extended */
    uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
                                iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
6426
6427
/** Opcode 0x9b. (aka fwait)
 *
 * Checks for pending x87 exceptions / device-not-available conditions and
 * otherwise does nothing. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6439
6440
/**
 * @opcode 0x9c
 *
 * PUSHF - deferred to iemCImpl_pushf; may VM-exit, and modifies rSP
 * (hence the xSP entry in the clobbered-register mask).
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
                                iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
6452
6453
/**
 * @opcode 0x9d
 *
 * POPF - deferred to iemCImpl_popf; changes RFLAGS (so IRQ delivery must be
 * rechecked before and after) and modifies rSP.
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_MNEMONIC(popf_Fv, "popf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
                                iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
6466
6467
/**
 * @opcode 0x9e
 * @opflmodify cf,pf,af,zf,sf
 *
 * SAHF - load SF/ZF/AF/PF/CF from AH.  In 64-bit mode this requires the
 * LAHF/SAHF CPUID feature; raises \#UD otherwise.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/); /* AH is encoded as SP with no REX */
    /* Keep only the five flags SAHF may set, force bit 1 (X86_EFL_1) on,
       and merge into the untouched upper 24 bits of EFLAGS. */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6492
6493
/**
 * @opcode 0x9f
 * @opfltest cf,pf,af,zf,sf
 *
 * LAHF - store the low byte of EFLAGS into AH.  In 64-bit mode this requires
 * the LAHF/SAHF CPUID feature; raises \#UD otherwise.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags); /* AH is encoded as SP with no REX */
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6512
6513
/**
 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
 * Will return/throw on failures.
 *
 * The fetched offset is 16/32/64 bits wide according to the effective
 * address mode, zero-extended to 64 bits.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } while (0)
6537
/**
 * @opcode 0xa0
 *
 * 'mov AL,Ob' - load AL from a direct memory offset (moffs) in the effective
 * segment.
 */
FNIEMOP_DEF(iemOp_mov_AL_Ob)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6562
6563
/**
 * @opcode 0xa1
 *
 * 'mov rAX,Ov' - load rAX (at effective operand size) from a direct memory
 * offset (moffs) in the effective segment.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6617
6618
/**
 * @opcode 0xa2
 *
 * 'mov Ob,AL' - store AL to a direct memory offset (moffs) in the effective
 * segment.
 */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
    IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6643
6644
/**
 * @opcode 0xa3
 *
 * 'mov Ov,rAX' - store rAX (at effective operand size) to a direct memory
 * offset (moffs) in the effective segment.
 */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Store rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6698
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one non-REP MOVS step: load ValBits bits from DS(overridable):rSI,
 * store to ES:rDI, then advance (or retreat, when EFLAGS.DF is set) both
 * index registers by ValBits/8 at the given address width. */
#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6718
/**
 * @opcode 0xa4
 * @opfltest df
 *
 * MOVSB - with a REP/REPNE prefix the whole string move is deferred to a C
 * implementation per address size; otherwise one step is emitted via
 * IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6768
6769
/**
 * @opcode 0xa5
 * @opfltest df
 *
 * MOVSW/MOVSD/MOVSQ - double dispatch on operand size and address size.
 * With a REP/REPNE prefix the move is deferred to a per-size C
 * implementation; otherwise one step is emitted via IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every path in the inner switch returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* op64/addr16 cannot be encoded */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6894
6895#undef IEM_MOVS_CASE
6896
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one non-REP CMPS step: load ValBits bits from DS(overridable):rSI
 * and ES:rDI, compare them via iemAImpl_cmp_uNN (updating EFLAGS), then
 * advance (or retreat, when EFLAGS.DF is set) both index registers by
 * ValBits/8 at the given address width. */
#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(3, 3, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        \
        IEM_MC_LOCAL(RTGCPTR, uAddr1); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr1, X86_GREG_xSI); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr1); \
        \
        IEM_MC_LOCAL(RTGCPTR, uAddr2); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr2, X86_GREG_xDI); \
        IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr2); \
        \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_ARG_LOCAL_REF(uint##ValBits##_t *, puValue1, uValue1, 0); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6926
6927/**
6928 * @opcode 0xa6
6929 * @opflclass arithmetic
6930 * @opfltest df
6931 */
6932FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
6933{
6934
6935 /*
6936 * Use the C implementation if a repeat prefix is encountered.
6937 */
6938 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6939 {
6940 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
6941 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6942 switch (pVCpu->iem.s.enmEffAddrMode)
6943 {
6944 case IEMMODE_16BIT:
6945 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6946 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6947 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6948 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6949 iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6950 case IEMMODE_32BIT:
6951 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6952 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6953 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6954 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6955 iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6956 case IEMMODE_64BIT:
6957 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6958 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6959 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6960 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6961 iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6962 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6963 }
6964 }
6965 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6966 {
6967 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
6968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6969 switch (pVCpu->iem.s.enmEffAddrMode)
6970 {
6971 case IEMMODE_16BIT:
6972 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6973 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6974 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6975 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6976 iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6977 case IEMMODE_32BIT:
6978 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6979 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6980 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6981 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6982 iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6983 case IEMMODE_64BIT:
6984 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6985 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6986 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6987 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6988 iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6989 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6990 }
6991 }
6992
6993 /*
6994 * Sharing case implementation with cmps[wdq] below.
6995 */
6996 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
6997 switch (pVCpu->iem.s.enmEffAddrMode)
6998 {
6999 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7000 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7001 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
7002 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7003 }
7004}
7005
7006
7007/**
7008 * @opcode 0xa7
7009 * @opflclass arithmetic
7010 * @opfltest df
7011 */
7012FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
7013{
7014 /*
7015 * Use the C implementation if a repeat prefix is encountered.
7016 */
7017 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7018 {
7019 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
7020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7021 switch (pVCpu->iem.s.enmEffOpSize)
7022 {
7023 case IEMMODE_16BIT:
7024 switch (pVCpu->iem.s.enmEffAddrMode)
7025 {
7026 case IEMMODE_16BIT:
7027 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7028 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7029 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7030 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7031 iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7032 case IEMMODE_32BIT:
7033 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7034 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7035 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7036 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7037 iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7038 case IEMMODE_64BIT:
7039 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7040 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7041 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7042 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7043 iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7044 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7045 }
7046 break;
7047 case IEMMODE_32BIT:
7048 switch (pVCpu->iem.s.enmEffAddrMode)
7049 {
7050 case IEMMODE_16BIT:
7051 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7052 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7053 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7054 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7055 iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7056 case IEMMODE_32BIT:
7057 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7058 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7059 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7060 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7061 iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7062 case IEMMODE_64BIT:
7063 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7064 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7065 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7066 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7067 iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7068 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7069 }
7070 case IEMMODE_64BIT:
7071 switch (pVCpu->iem.s.enmEffAddrMode)
7072 {
7073 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
7074 case IEMMODE_32BIT:
7075 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7076 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7077 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7078 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7079 iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7080 case IEMMODE_64BIT:
7081 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7082 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7083 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7084 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7085 iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7086 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7087 }
7088 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7089 }
7090 }
7091
7092 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7093 {
7094 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
7095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7096 switch (pVCpu->iem.s.enmEffOpSize)
7097 {
7098 case IEMMODE_16BIT:
7099 switch (pVCpu->iem.s.enmEffAddrMode)
7100 {
7101 case IEMMODE_16BIT:
7102 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7103 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7104 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7105 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7106 iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7107 case IEMMODE_32BIT:
7108 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7109 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7110 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7111 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7112 iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7113 case IEMMODE_64BIT:
7114 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7115 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7116 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7117 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7118 iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7119 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7120 }
7121 break;
7122 case IEMMODE_32BIT:
7123 switch (pVCpu->iem.s.enmEffAddrMode)
7124 {
7125 case IEMMODE_16BIT:
7126 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7127 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7128 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7129 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7130 iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7131 case IEMMODE_32BIT:
7132 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7133 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7134 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7135 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7136 iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7137 case IEMMODE_64BIT:
7138 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7139 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7140 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7141 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7142 iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7143 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7144 }
7145 case IEMMODE_64BIT:
7146 switch (pVCpu->iem.s.enmEffAddrMode)
7147 {
7148 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
7149 case IEMMODE_32BIT:
7150 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7151 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7152 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7153 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7154 iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7155 case IEMMODE_64BIT:
7156 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7157 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7158 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7159 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7160 iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7161 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7162 }
7163 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7164 }
7165 }
7166
7167 /*
7168 * Annoying double switch here.
7169 * Using ugly macro for implementing the cases, sharing it with cmpsb.
7170 */
7171 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
7172 switch (pVCpu->iem.s.enmEffOpSize)
7173 {
7174 case IEMMODE_16BIT:
7175 switch (pVCpu->iem.s.enmEffAddrMode)
7176 {
7177 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7178 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7179 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
7180 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7181 }
7182 break;
7183
7184 case IEMMODE_32BIT:
7185 switch (pVCpu->iem.s.enmEffAddrMode)
7186 {
7187 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7188 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7189 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
7190 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7191 }
7192 break;
7193
7194 case IEMMODE_64BIT:
7195 switch (pVCpu->iem.s.enmEffAddrMode)
7196 {
7197 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7198 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break;
7199 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
7200 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7201 }
7202 break;
7203 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7204 }
7205}
7206
7207#undef IEM_CMPS_CASE
7208
7209/**
7210 * @opcode 0xa8
7211 * @opflclass logical
7212 */
7213FNIEMOP_DEF(iemOp_test_AL_Ib)
7214{
7215 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
7216 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7217 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
7218}
7219
7220
7221/**
7222 * @opcode 0xa9
7223 * @opflclass logical
7224 */
7225FNIEMOP_DEF(iemOp_test_eAX_Iz)
7226{
7227 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
7228 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7229 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
7230}
7231
7232
/**
 * Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX for the
 * single-iteration (no REP prefix) STOS forms.
 *
 * Stores AL/AX/EAX/RAX to ES:xDI, then steps xDI forward or backward by the
 * operand size depending on EFLAGS.DF.
 *
 * @param   ValBits     Operand width in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address width in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags for IEM_MC_BEGIN.
 */
#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7249
7250/**
7251 * @opcode 0xaa
7252 */
7253FNIEMOP_DEF(iemOp_stosb_Yb_AL)
7254{
7255 /*
7256 * Use the C implementation if a repeat prefix is encountered.
7257 */
7258 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7259 {
7260 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
7261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7262 switch (pVCpu->iem.s.enmEffAddrMode)
7263 {
7264 case IEMMODE_16BIT:
7265 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7266 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7267 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7268 iemCImpl_stos_al_m16);
7269 case IEMMODE_32BIT:
7270 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7271 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7272 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7273 iemCImpl_stos_al_m32);
7274 case IEMMODE_64BIT:
7275 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7276 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7277 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7278 iemCImpl_stos_al_m64);
7279 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7280 }
7281 }
7282
7283 /*
7284 * Sharing case implementation with stos[wdq] below.
7285 */
7286 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
7287 switch (pVCpu->iem.s.enmEffAddrMode)
7288 {
7289 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7290 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7291 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
7292 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7293 }
7294}
7295
7296
7297/**
7298 * @opcode 0xab
7299 */
7300FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
7301{
7302 /*
7303 * Use the C implementation if a repeat prefix is encountered.
7304 */
7305 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7306 {
7307 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
7308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7309 switch (pVCpu->iem.s.enmEffOpSize)
7310 {
7311 case IEMMODE_16BIT:
7312 switch (pVCpu->iem.s.enmEffAddrMode)
7313 {
7314 case IEMMODE_16BIT:
7315 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7316 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7317 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7318 iemCImpl_stos_ax_m16);
7319 case IEMMODE_32BIT:
7320 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7321 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7322 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7323 iemCImpl_stos_ax_m32);
7324 case IEMMODE_64BIT:
7325 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7326 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7327 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7328 iemCImpl_stos_ax_m64);
7329 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7330 }
7331 break;
7332 case IEMMODE_32BIT:
7333 switch (pVCpu->iem.s.enmEffAddrMode)
7334 {
7335 case IEMMODE_16BIT:
7336 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7337 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7338 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7339 iemCImpl_stos_eax_m16);
7340 case IEMMODE_32BIT:
7341 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7342 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7343 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7344 iemCImpl_stos_eax_m32);
7345 case IEMMODE_64BIT:
7346 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7347 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7348 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7349 iemCImpl_stos_eax_m64);
7350 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7351 }
7352 case IEMMODE_64BIT:
7353 switch (pVCpu->iem.s.enmEffAddrMode)
7354 {
7355 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7356 case IEMMODE_32BIT:
7357 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7358 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7359 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7360 iemCImpl_stos_rax_m32);
7361 case IEMMODE_64BIT:
7362 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7363 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7364 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7365 iemCImpl_stos_rax_m64);
7366 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7367 }
7368 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7369 }
7370 }
7371
7372 /*
7373 * Annoying double switch here.
7374 * Using ugly macro for implementing the cases, sharing it with stosb.
7375 */
7376 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7377 switch (pVCpu->iem.s.enmEffOpSize)
7378 {
7379 case IEMMODE_16BIT:
7380 switch (pVCpu->iem.s.enmEffAddrMode)
7381 {
7382 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7383 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7384 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
7385 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7386 }
7387 break;
7388
7389 case IEMMODE_32BIT:
7390 switch (pVCpu->iem.s.enmEffAddrMode)
7391 {
7392 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7393 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7394 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
7395 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7396 }
7397 break;
7398
7399 case IEMMODE_64BIT:
7400 switch (pVCpu->iem.s.enmEffAddrMode)
7401 {
7402 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7403 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
7404 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
7405 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7406 }
7407 break;
7408 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7409 }
7410}
7411
7412#undef IEM_STOS_CASE
7413
/**
 * Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv for the
 * single-iteration (no REP prefix) LODS forms.
 *
 * Loads a value from iEffSeg:xSI (segment overridable) into AL/AX/EAX/RAX,
 * then steps xSI forward or backward by the operand size depending on
 * EFLAGS.DF.
 *
 * @param   ValBits     Operand width in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address width in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags for IEM_MC_BEGIN.
 */
#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7430
7431/**
7432 * @opcode 0xac
7433 * @opfltest df
7434 */
7435FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
7436{
7437 /*
7438 * Use the C implementation if a repeat prefix is encountered.
7439 */
7440 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7441 {
7442 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
7443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7444 switch (pVCpu->iem.s.enmEffAddrMode)
7445 {
7446 case IEMMODE_16BIT:
7447 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7448 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7449 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7450 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7451 iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
7452 case IEMMODE_32BIT:
7453 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7454 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7455 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7456 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7457 iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
7458 case IEMMODE_64BIT:
7459 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7460 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7461 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7462 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7463 iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
7464 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7465 }
7466 }
7467
7468 /*
7469 * Sharing case implementation with stos[wdq] below.
7470 */
7471 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
7472 switch (pVCpu->iem.s.enmEffAddrMode)
7473 {
7474 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7475 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7476 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
7477 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7478 }
7479}
7480
7481
7482/**
7483 * @opcode 0xad
7484 * @opfltest df
7485 */
7486FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
7487{
7488 /*
7489 * Use the C implementation if a repeat prefix is encountered.
7490 */
7491 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7492 {
7493 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
7494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7495 switch (pVCpu->iem.s.enmEffOpSize)
7496 {
7497 case IEMMODE_16BIT:
7498 switch (pVCpu->iem.s.enmEffAddrMode)
7499 {
7500 case IEMMODE_16BIT:
7501 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7502 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7503 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7504 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7505 iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
7506 case IEMMODE_32BIT:
7507 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7508 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7509 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7510 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7511 iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
7512 case IEMMODE_64BIT:
7513 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7514 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7515 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7516 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7517 iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
7518 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7519 }
7520 break;
7521 case IEMMODE_32BIT:
7522 switch (pVCpu->iem.s.enmEffAddrMode)
7523 {
7524 case IEMMODE_16BIT:
7525 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7526 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7527 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7528 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7529 iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
7530 case IEMMODE_32BIT:
7531 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7532 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7533 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7534 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7535 iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
7536 case IEMMODE_64BIT:
7537 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7538 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7539 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7540 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7541 iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
7542 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7543 }
7544 case IEMMODE_64BIT:
7545 switch (pVCpu->iem.s.enmEffAddrMode)
7546 {
7547 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
7548 case IEMMODE_32BIT:
7549 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7550 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7551 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7552 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7553 iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
7554 case IEMMODE_64BIT:
7555 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7556 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7557 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7558 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7559 iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
7560 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7561 }
7562 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7563 }
7564 }
7565
7566 /*
7567 * Annoying double switch here.
7568 * Using ugly macro for implementing the cases, sharing it with lodsb.
7569 */
7570 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
7571 switch (pVCpu->iem.s.enmEffOpSize)
7572 {
7573 case IEMMODE_16BIT:
7574 switch (pVCpu->iem.s.enmEffAddrMode)
7575 {
7576 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7577 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7578 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
7579 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7580 }
7581 break;
7582
7583 case IEMMODE_32BIT:
7584 switch (pVCpu->iem.s.enmEffAddrMode)
7585 {
7586 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7587 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7588 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
7589 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7590 }
7591 break;
7592
7593 case IEMMODE_64BIT:
7594 switch (pVCpu->iem.s.enmEffAddrMode)
7595 {
7596 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7597 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
7598 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
7599 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7600 }
7601 break;
7602 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7603 }
7604}
7605
7606#undef IEM_LODS_CASE
7607
/**
 * Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv for the
 * single-iteration (no REP prefix) SCAS forms.
 *
 * Compares AL/AX/EAX/RAX against the value at ES:xDI via iemAImpl_cmp_uNN
 * (updates EFLAGS only), then steps xDI forward or backward by the operand
 * size depending on EFLAGS.DF.
 *
 * @param   ValBits     Operand width in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address width in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags for IEM_MC_BEGIN.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
7630
7631/**
7632 * @opcode 0xae
7633 * @opflclass arithmetic
7634 * @opfltest df
7635 */
7636FNIEMOP_DEF(iemOp_scasb_AL_Xb)
7637{
7638 /*
7639 * Use the C implementation if a repeat prefix is encountered.
7640 */
7641 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7642 {
7643 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
7644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7645 switch (pVCpu->iem.s.enmEffAddrMode)
7646 {
7647 case IEMMODE_16BIT:
7648 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7649 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7650 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7651 iemCImpl_repe_scas_al_m16);
7652 case IEMMODE_32BIT:
7653 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7654 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7655 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7656 iemCImpl_repe_scas_al_m32);
7657 case IEMMODE_64BIT:
7658 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7659 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7660 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7661 iemCImpl_repe_scas_al_m64);
7662 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7663 }
7664 }
7665 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7666 {
7667 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
7668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7669 switch (pVCpu->iem.s.enmEffAddrMode)
7670 {
7671 case IEMMODE_16BIT:
7672 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7673 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7674 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7675 iemCImpl_repne_scas_al_m16);
7676 case IEMMODE_32BIT:
7677 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7678 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7679 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7680 iemCImpl_repne_scas_al_m32);
7681 case IEMMODE_64BIT:
7682 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7683 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7684 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7685 iemCImpl_repne_scas_al_m64);
7686 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7687 }
7688 }
7689
7690 /*
7691 * Sharing case implementation with stos[wdq] below.
7692 */
7693 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
7694 switch (pVCpu->iem.s.enmEffAddrMode)
7695 {
7696 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7697 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7698 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
7699 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7700 }
7701}
7702
7703
/**
 * @opcode      0xaf
 * @opflclass   arithmetic
 * @opfltest    df
 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    /*
     * SCASW/SCASD/SCASQ: compare rAX with [rDI], advancing rDI by DF.
     * Use the C implementation if a repeat prefix is encountered, deferring
     * to the CImpl worker matching the effective operand + address size.
     * All inner switch cases return, so the missing breaks after the inner
     * switches are harmless.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        /* REPE/REPZ: repeat while rCX != 0 and ZF set. */
        IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        /* REPNE/REPNZ: repeat while rCX != 0 and ZF clear. */
        IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7886
7887#undef IEM_SCAS_CASE
7888
/**
 * Common 'mov r8, imm8' helper.
 *
 * Stores the immediate byte into the given fixed 8-bit register.
 *
 * @param   iFixedReg   The register index (X86_GREG_xXX, already OR'ed with
 *                      uRexB by the caller where applicable).
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_STORE_GREG_U8_CONST(iFixedReg, u8Imm);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7901
7902
/**
 * @opcode 0xb0
 *
 * MOV AL,Ib - register 0; REX.B selects R8L.
 */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
7911
7912
/**
 * @opcode 0xb1
 *
 * MOV CL,Ib - register 1; REX.B selects R9L.
 */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
7921
7922
/**
 * @opcode 0xb2
 *
 * MOV DL,Ib - register 2; REX.B selects R10L.
 */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
7931
7932
/**
 * @opcode 0xb3
 *
 * MOV BL,Ib - register 3; REX.B selects R11L.
 */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
7941
7942
/**
 * @opcode 0xb4
 *
 * MOV AH,Ib - register encoding 4 (X86_GREG_xSP).  Without a REX prefix this
 * is AH; with REX it is SPL/R12L - presumably resolved by the 8-bit GREG
 * store (high-byte vs. low-byte mapping handled by the MC infrastructure).
 */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
7951
7952
/**
 * @opcode 0xb5
 *
 * MOV CH,Ib - register encoding 5 (X86_GREG_xBP); CH without REX,
 * BPL/R13L with REX.
 */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
7961
7962
/**
 * @opcode 0xb6
 *
 * MOV DH,Ib - register encoding 6 (X86_GREG_xSI); DH without REX,
 * SIL/R14L with REX.
 */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
7971
7972
/**
 * @opcode 0xb7
 *
 * MOV BH,Ib - register encoding 7 (X86_GREG_xDI); BH without REX,
 * DIL/R15L with REX.
 */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
7981
7982
/**
 * Common 'mov regX,immX' helper.
 *
 * Stores an operand-size immediate into the given fixed register.  Note that
 * in 64-bit operand size this fetches a full 64-bit immediate (the only
 * instruction form with a qword immediate, aka movabs).
 *
 * @param   iFixedReg   The register index (X86_GREG_xXX, already OR'ed with
 *                      uRexB by the caller).
 */
FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U16_CONST(iFixedReg, u16Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U32_CONST(iFixedReg, u32Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64_CONST(iFixedReg, u64Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8019
8020
/**
 * @opcode 0xb8
 *
 * MOV rAX,Iv - REX.B selects r8.
 */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
8029
8030
/**
 * @opcode 0xb9
 *
 * MOV rCX,Iv - REX.B selects r9.
 */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
8039
8040
/**
 * @opcode 0xba
 *
 * MOV rDX,Iv - REX.B selects r10.
 */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
8049
8050
/**
 * @opcode 0xbb
 *
 * MOV rBX,Iv - REX.B selects r11.
 */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
8059
8060
/**
 * @opcode 0xbc
 *
 * MOV rSP,Iv - REX.B selects r12.
 */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
8069
8070
/**
 * @opcode 0xbd
 *
 * MOV rBP,Iv - REX.B selects r13.
 */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
8079
8080
/**
 * @opcode 0xbe
 *
 * MOV rSI,Iv - REX.B selects r14.
 */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
8089
8090
/**
 * @opcode 0xbf
 *
 * MOV rDI,Iv - REX.B selects r15.
 */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
8099
8100
/**
 * @opcode 0xc0
 *
 * Group 2 byte shift/rotate with immediate count: rol/ror/rcl/rcr/shl/shr/sar
 * Eb,Ib.  The /6 encoding is invalid; all forms require a 186 or later.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Need to use a body macro here since the EFLAGS behaviour differs between
       the shifts, rotates and rotate w/ carry.  Sigh. */
#define GRP2_BODY_Eb_Ib(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,   pu8Dst,            0); \
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,           2); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory */ \
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_186, 0); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
        \
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        \
        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        \
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
        IEM_MC_FETCH_EFLAGS(EFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        \
        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
        IEM_MC_COMMIT_EFLAGS(EFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } (void)0

    /* Dispatch on the ModR/M reg field to pick the actual operation. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        /**
         * @opdone
         * @opmaps      grp2_c0
         * @opcode      /0
         * @opflclass   rotate_count
         */
        case 0:
        {
            IEMOP_MNEMONIC2(MI, ROL, rol, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps      grp2_c0
         * @opcode      /1
         * @opflclass   rotate_count
         */
        case 1:
        {
            IEMOP_MNEMONIC2(MI, ROR, ror, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps      grp2_c0
         * @opcode      /2
         * @opflclass   rotate_carry_count
         */
        case 2:
        {
            IEMOP_MNEMONIC2(MI, RCL, rcl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps      grp2_c0
         * @opcode      /3
         * @opflclass   rotate_carry_count
         */
        case 3:
        {
            IEMOP_MNEMONIC2(MI, RCR, rcr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps      grp2_c0
         * @opcode      /4
         * @opflclass   shift_count
         */
        case 4:
        {
            IEMOP_MNEMONIC2(MI, SHL, shl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps      grp2_c0
         * @opcode      /5
         * @opflclass   shift_count
         */
        case 5:
        {
            IEMOP_MNEMONIC2(MI, SHR, shr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps      grp2_c0
         * @opcode      /7
         * @opflclass   shift_count
         */
        case 7:
        {
            IEMOP_MNEMONIC2(MI, SAR, sar, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
            break;
        }

        /** @opdone */
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
#undef GRP2_BODY_Eb_Ib
}
8253
8254
/* Need to use a body macro here since the EFLAGS behaviour differs between
   the shifts, rotates and rotate w/ carry.  Sigh.
   Decodes the shift count immediate (Ib) and dispatches on the effective
   operand size, for both register and memory destination forms.  Used by
   the grp2_c1 (0xc1) workers below; #undef'ed after the last of them. */
#define GRP2_BODY_Ev_Ib(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,           2); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,           2); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,           2); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 3, 0, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
8385
/**
 * @opmaps      grp2_c1
 * @opcode      /0
 * @opflclass   rotate_count
 *
 * ROL Ev,Ib - rotate left by immediate count.
 */
FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, ROL, rol, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
}
8396
8397
/**
 * @opmaps      grp2_c1
 * @opcode      /1
 * @opflclass   rotate_count
 *
 * ROR Ev,Ib - rotate right by immediate count.
 */
FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, ROR, ror, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
}
8408
8409
/**
 * @opmaps      grp2_c1
 * @opcode      /2
 * @opflclass   rotate_carry_count
 *
 * RCL Ev,Ib - rotate left through carry by immediate count.
 */
FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, RCL, rcl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
}
8420
8421
/**
 * @opmaps      grp2_c1
 * @opcode      /3
 * @opflclass   rotate_carry_count
 *
 * RCR Ev,Ib - rotate right through carry by immediate count.
 */
FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, RCR, rcr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
}
8432
8433
/**
 * @opmaps      grp2_c1
 * @opcode      /4
 * @opflclass   shift_count
 *
 * SHL Ev,Ib - shift left by immediate count.
 */
FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, SHL, shl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
}
8444
8445
/**
 * @opmaps      grp2_c1
 * @opcode      /5
 * @opflclass   shift_count
 *
 * SHR Ev,Ib - logical shift right by immediate count.
 */
FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, SHR, shr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
}
8456
8457
/**
 * @opmaps      grp2_c1
 * @opcode      /7
 * @opflclass   shift_count
 *
 * SAR Ev,Ib - arithmetic shift right by immediate count.
 */
FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, SAR, sar, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
}
8468
8469#undef GRP2_BODY_Ev_Ib
8470
/**
 * @opcode 0xc1
 *
 * Group 2 Ev,Ib dispatcher: routes to the per-operation workers above based
 * on the ModR/M reg field.  /6 is an invalid encoding.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_Ib, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_Ib, bRm);
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
}
8492
8493
/**
 * @opcode 0xc2
 *
 * RET Iw - near return, popping Iw additional bytes off the stack.
 * Defers to the CImpl worker matching the effective operand size.
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_16, u16Imm);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_32, u16Imm);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_64, u16Imm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8517
8518
/**
 * @opcode 0xc3
 *
 * RET - plain near return.  Defers to the CImpl worker matching the
 * effective operand size.
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_16);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_32);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_64);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8541
8542
/**
 * @opcode 0xc4
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LES instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode.
       (Note: the original comment said "LDS" here; 0xc4 is LES - the LDS
       twin is 0xc5 below.) */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm) )
    {
        /* Three-byte VEX prefix form. */
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
               the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            if (IEM_IS_64BIT_CODE(pVCpu))
            {
                /* Fold VEX.W and the inverted VEX.R/X/B into the prefix flags so
                   IEM_GET_MODRM_EX & friends work for VEX instructions too. */
#if 1
                AssertCompile(IEM_OP_PRF_SIZE_REX_W == RT_BIT_32(9));
                pVCpu->iem.s.fPrefixes |= (uint32_t)(bVex2 & 0x80) << (9 - 7);
                AssertCompile(IEM_OP_PRF_REX_B == RT_BIT_32(25) && IEM_OP_PRF_REX_X == RT_BIT_32(26) && IEM_OP_PRF_REX_R == RT_BIT_32(27));
                pVCpu->iem.s.fPrefixes |= (uint32_t)(~bRm & 0xe0) << (25 - 5);
#else
                if (bVex2 & 0x80 /* VEX.W */)
                    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
                if (~bRm & 0x20 /* VEX.~B */)
                    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_B;
                if (~bRm & 0x40 /* VEX.~X */)
                    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_X;
                if (~bRm & 0x80 /* VEX.~R */)
                    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_R;
#endif
            }
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;

            /* VEX.mmmmm selects the opcode map. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        Log(("VEX3: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    /* Legacy LES Gv,Mp form. */
    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
8628
8629
/**
 * @opcode 0xc5
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form.
       (Note: the original comment said "LES" here; 0xc5 is LDS - the LES
       twin is 0xc4 above.) */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Two-byte VEX prefix form: implied 0x0f leading opcode byte. */
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
               the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* Fold the inverted VEX.R into the prefix flags so IEM_GET_MODRM_EX
               & friends work for VEX instructions too. */
            AssertCompile(IEM_OP_PRF_REX_R == RT_BIT_32(27));
            pVCpu->iem.s.fPrefixes |= (uint32_t)(~bRm & 0x80) << (27 - 7);
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bRm & 0x3;

#ifdef IEM_WITH_VEX
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    /* Legacy LDS Gv,Mp form. */
    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
8675
8676
/**
 * @opcode 0xc6
 *
 * Group 11: MOV Eb,Ib (only the /0 encoding is valid in this group).
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8710
8711
/**
 * @opcode 0xc7
 *
 * Group 11: MOV Ev,Iz (only the /0 encoding is valid in this group).
 * In 64-bit operand size the 32-bit immediate is sign-extended.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8799
8800
8801
8802
/**
 * @opcode 0xc8
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();                    /* ENTER requires an 80186 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);            /* Iw: frame size */
    uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel); /* Ib: nesting level */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to the C implementation; the flush mask covers the two GPRs
       ENTER writes: the stack pointer and the frame pointer. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(0,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
                                iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
8819
8820
/**
 * @opcode 0xc9
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();                    /* LEAVE requires an 80186 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to the C implementation; flushes the stack and frame pointer
       shadow copies, both of which LEAVE modifies. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(0,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
                                iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
8835
8836
/**
 * @opcode 0xca
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);  /* Iw: bytes to pop after the far return */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Far return: indirect far branch that pops from the stack and may change
       CPL/mode.  The flush list covers xSP plus all shadowed DS/ES/FS/GS
       selector/base/limit/attribute copies, since those segment registers can
       be sanitized on a privilege-level change (same rationale as iemOp_iret). */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
8866
8867
/**
 * @opcode 0xcb
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Same as iemOp_retf_Iw but with a zero pop count (last argument).
       See that function for why all the DS/ES/FS/GS shadow copies are
       flushed here. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
8896
8897
/**
 * @opcode 0xcc
 */
FNIEMOP_DEF(iemOp_int3)
{
    IEMOP_MNEMONIC(int3, "int3");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Raises \#BP via the common software-interrupt implementation.  Marked as
       a mode-changing far branch that can cause a VM-exit and modifies RFLAGS;
       IEM_CIMPL_F_END_TB stops translation-block compilation after it. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
}
8909
8910
/**
 * @opcode 0xcd
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    IEMOP_MNEMONIC(int_Ib, "int Ib");
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);  /* Ib: interrupt vector number */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Software interrupt through the IDT; a UINT64_MAX flush mask flushes
       all shadowed guest registers since the handler may change anything. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS, UINT64_MAX,
                                iemCImpl_int, u8Int, IEMINT_INTN);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
8924
8925
/**
 * @opcode 0xce
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();   /* INTO is invalid in 64-bit mode. */
    /* Conditionally raises \#OF (only when EFLAGS.OF is set, hence the
       IEM_CIMPL_F_BRANCH_CONDITIONAL flag); flushes everything like INT n. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_BRANCH_CONDITIONAL | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                UINT64_MAX,
                                iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
8939
8940
/**
 * @opcode 0xcf
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Interrupt return: far branch popping CS:IP and FLAGS, may change the
       CPU mode and must re-check for pending IRQs afterwards
       (IEM_CIMPL_F_CHECK_IRQ_BEFORE covers the next instruction boundary). */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
                                iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
    /* Segment registers are sanitized when returning to an outer ring, or fully
       reloaded when returning to v86 mode.  Thus the large flush list above. */
}
8971
8972
/**
 * @opcode 0xd0
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    /* Group 2, byte operand, shift count fixed at 1 (rol/ror/rcl/rcr/shl/shr/sar Eb,1). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Need to use a body macro here since the EFLAGS behaviour differs between
       the shifts, rotates and rotate w/ carry. Sigh.  The macro expands the
       full register/memory body; a_pImplExpr selects the per-operation
       IEMOPSHIFTSIZES implementation table. */
#define GRP2_BODY_Eb_1(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,   pu8Dst,             0); \
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1); \
        IEM_MC_ARG(uint32_t *,  pEFlags,            2); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory */ \
        IEM_MC_BEGIN(3, 3, 0, 0); \
        IEM_MC_ARG(uint8_t *,   pu8Dst,             0); \
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1); \
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,    2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_FETCH_EFLAGS(EFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        \
        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
        IEM_MC_COMMIT_EFLAGS(EFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } (void)0

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /0
         * @opflclass rotate_1
         */
        case 0:
        {
            IEMOP_MNEMONIC2(M1, ROL, rol, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /1
         * @opflclass rotate_1
         */
        case 1:
        {
            IEMOP_MNEMONIC2(M1, ROR, ror, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /2
         * @opflclass rotate_carry_1
         */
        case 2:
        {
            IEMOP_MNEMONIC2(M1, RCL, rcl, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /3
         * @opflclass rotate_carry_1
         */
        case 3:
        {
            IEMOP_MNEMONIC2(M1, RCR, rcr, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /4
         * @opflclass shift_1
         */
        case 4:
        {
            IEMOP_MNEMONIC2(M1, SHL, shl, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /5
         * @opflclass shift_1
         */
        case 5:
        {
            IEMOP_MNEMONIC2(M1, SHR, shr, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /7
         * @opflclass shift_1
         */
        case 7:
        {
            IEMOP_MNEMONIC2(M1, SAR, sar, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
            break;
        }
        /** @opdone */
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is undefined in group 2 -> \#UD */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
#undef GRP2_BODY_Eb_1
}
9112
9113
/* Need to use a body macro here since the EFLAGS behaviour differs between
   the shifts, rotates and rotate w/ carry. Sigh.  Expands the full
   decode/execute body for a "Grp2 Ev,1" form: dispatches on the effective
   operand size (16/32/64) and on register vs. memory operand, with the shift
   count fixed at 1.  a_pImplExpr selects the IEMOPSHIFTSIZES table of the
   actual operation (rol/ror/rcl/rcr/shl/shr/sar). */
#define GRP2_BODY_Ev_1(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,  pu16Dst,             0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG(uint32_t *,  pEFlags,             2); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,  pu32Dst,             0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG(uint32_t *,  pEFlags,             2); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                /* 32-bit GPR writes clear the upper half of the 64-bit register. */ \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,  pu64Dst,             0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG(uint32_t *,  pEFlags,             2); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 3, 0, 0); \
                IEM_MC_ARG(uint16_t *,  pu16Dst,             0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,     2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *,  pu32Dst,             0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,     2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *,  pu64Dst,             0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,     2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
9234
/**
 * @opmaps grp2_d1
 * @opcode /0
 * @opflclass rotate_1
 */
FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_1, uint8_t, bRm)
{
    /* rol Ev,1 - expands via the shared GRP2_BODY_Ev_1 body macro. */
    IEMOP_MNEMONIC2(M1, ROL, rol, Ev, 1, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
}
9245
9246
/**
 * @opmaps grp2_d1
 * @opcode /1
 * @opflclass rotate_1
 */
FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_1, uint8_t, bRm)
{
    /* ror Ev,1 - expands via the shared GRP2_BODY_Ev_1 body macro. */
    IEMOP_MNEMONIC2(M1, ROR, ror, Ev, 1, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
}
9257
9258
/**
 * @opmaps grp2_d1
 * @opcode /2
 * @opflclass rotate_carry_1
 */
FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_1, uint8_t, bRm)
{
    /* rcl Ev,1 - rotate-through-carry; expands via the shared GRP2_BODY_Ev_1 macro. */
    IEMOP_MNEMONIC2(M1, RCL, rcl, Ev, 1, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
}
9269
9270
/**
 * @opmaps grp2_d1
 * @opcode /3
 * @opflclass rotate_carry_1
 */
FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_1, uint8_t, bRm)
{
    /* rcr Ev,1 - rotate-through-carry; expands via the shared GRP2_BODY_Ev_1 macro. */
    IEMOP_MNEMONIC2(M1, RCR, rcr, Ev, 1, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
}
9281
9282
/**
 * @opmaps grp2_d1
 * @opcode /4
 * @opflclass shift_1
 */
FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_1, uint8_t, bRm)
{
    /* shl Ev,1 - expands via the shared GRP2_BODY_Ev_1 body macro. */
    IEMOP_MNEMONIC2(M1, SHL, shl, Ev, 1, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
}
9293
9294
/**
 * @opmaps grp2_d1
 * @opcode /5
 * @opflclass shift_1
 */
FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_1, uint8_t, bRm)
{
    /* shr Ev,1 - expands via the shared GRP2_BODY_Ev_1 body macro. */
    IEMOP_MNEMONIC2(M1, SHR, shr, Ev, 1, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
}
9305
9306
/**
 * @opmaps grp2_d1
 * @opcode /7
 * @opflclass shift_1
 */
FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_1, uint8_t, bRm)
{
    /* sar Ev,1 - arithmetic right shift; expands via the shared GRP2_BODY_Ev_1 macro. */
    IEMOP_MNEMONIC2(M1, SAR, sar, Ev, 1, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
}
9317
9318#undef GRP2_BODY_Ev_1
9319
/**
 * @opcode 0xd1
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    /* Group 2, Ev with shift count 1: dispatch on ModR/M.reg; /6 is undefined. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_1, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_1, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_1, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_1, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_1, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_1, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_1, bRm);
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();       /* /6 is undefined in group 2 -> \#UD */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
}
9339
9340
/**
 * @opcode 0xd2
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    /* Group 2, byte operand, shift count taken from CL.
       NOTE(review): the @opmaps tags in the case doc-comments below say
       grp2_d0, but this is the 0xd2 handler - looks like a copy/paste from
       iemOp_Grp2_Eb_1; confirm against the opcode map generator. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Need to use a body macro here since the EFLAGS behaviour differs between
       the shifts, rotates and rotate w/ carry. Sigh.  Same structure as
       GRP2_BODY_Eb_1, but the count is fetched from CL at runtime. */
#define GRP2_BODY_Eb_CL(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0); \
        IEM_MC_ARG(uint8_t,     cShiftArg,  1); \
        IEM_MC_ARG(uint32_t *,  pEFlags,    2); \
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory */ \
        IEM_MC_BEGIN(3, 3, 0, 0); \
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0); \
        IEM_MC_ARG(uint8_t,     cShiftArg,  1); \
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_FETCH_EFLAGS(EFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        \
        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
        IEM_MC_COMMIT_EFLAGS(EFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } (void)0

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /0
         * @opflclass rotate_count
         */
        case 0:
        {
            IEMOP_MNEMONIC2EX(rol_Eb_CL, "rol Eb,CL", M_CL, ROL, rol, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /1
         * @opflclass rotate_count
         */
        case 1:
        {
            IEMOP_MNEMONIC2EX(ror_Eb_CL, "ror Eb,CL", M_CL, ROR, ror, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /2
         * @opflclass rotate_carry_count
         */
        case 2:
        {
            IEMOP_MNEMONIC2EX(rcl_Eb_CL, "rcl Eb,CL", M_CL, RCL, rcl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /3
         * @opflclass rotate_carry_count
         */
        case 3:
        {
            IEMOP_MNEMONIC2EX(rcr_Eb_CL, "rcr Eb,CL", M_CL, RCR, rcr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /4
         * @opflclass shift_count
         */
        case 4:
        {
            IEMOP_MNEMONIC2EX(shl_Eb_CL, "shl Eb,CL", M_CL, SHL, shl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /5
         * @opflclass shift_count
         */
        case 5:
        {
            IEMOP_MNEMONIC2EX(shr_Eb_CL, "shr Eb,CL", M_CL, SHR, shr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /7
         * @opflclass shift_count
         */
        case 7:
        {
            IEMOP_MNEMONIC2EX(sar_Eb_CL, "sar Eb,CL", M_CL, SAR, sar, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
            break;
        }
        /** @opdone */
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is undefined in group 2 -> \#UD */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
#undef GRP2_BODY_Eb_CL
}
9482
9483
/* Need to use a body macro here since the EFLAGS behaviour differs between
   the shifts, rotates and rotate w/ carry. Sigh.  Expands the full
   decode/execute body for a "Grp2 Ev,CL" form: dispatches on effective
   operand size (16/32/64) and register vs. memory operand, fetching the
   shift count from CL at runtime.  a_pImplExpr selects the IEMOPSHIFTSIZES
   table of the actual operation. */
#define GRP2_BODY_Ev_CL(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0); \
                IEM_MC_ARG(uint8_t,     cShiftArg,  1); \
                IEM_MC_ARG(uint32_t *,  pEFlags,    2); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0); \
                IEM_MC_ARG(uint8_t,     cShiftArg,  1); \
                IEM_MC_ARG(uint32_t *,  pEFlags,    2); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                /* 32-bit GPR writes clear the upper half of the 64-bit register. */ \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0); \
                IEM_MC_ARG(uint8_t,     cShiftArg,  1); \
                IEM_MC_ARG(uint32_t *,  pEFlags,    2); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 3, 0, 0); \
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0); \
                IEM_MC_ARG(uint8_t,     cShiftArg,  1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0); \
                IEM_MC_ARG(uint8_t,     cShiftArg,  1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0); \
                IEM_MC_ARG(uint8_t,     cShiftArg,  1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
9610
9611
/**
 * @opmaps grp2_d0
 * @opcode /0
 * @opflclass rotate_count
 */
FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_CL, uint8_t, bRm)
{
    /* rol Ev,CL - expands via the shared GRP2_BODY_Ev_CL body macro.
       NOTE(review): the @opmaps tag above says grp2_d0 but this is the 0xd3
       group - confirm against the opcode map generator. */
    IEMOP_MNEMONIC2EX(rol_Ev_CL, "rol Ev,CL", M_CL, ROL, rol, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
}
9622
9623
/**
 * @opmaps grp2_d0
 * @opcode /1
 * @opflclass rotate_count
 */
FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_CL, uint8_t, bRm)
{
    /* ror Ev,CL - expands via the shared GRP2_BODY_Ev_CL body macro. */
    IEMOP_MNEMONIC2EX(ror_Ev_CL, "ror Ev,CL", M_CL, ROR, ror, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
}
9634
9635
/**
 * @opmaps grp2_d0
 * @opcode /2
 * @opflclass rotate_carry_count
 */
FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_CL, uint8_t, bRm)
{
    /* rcl Ev,CL - rotate-through-carry; expands via the shared GRP2_BODY_Ev_CL macro. */
    IEMOP_MNEMONIC2EX(rcl_Ev_CL, "rcl Ev,CL", M_CL, RCL, rcl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
}
9646
9647
/**
 * @opmaps grp2_d0
 * @opcode /3
 * @opflclass rotate_carry_count
 */
FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_CL, uint8_t, bRm)
{
    /* rcr Ev,CL - rotate-through-carry; expands via the shared GRP2_BODY_Ev_CL macro. */
    IEMOP_MNEMONIC2EX(rcr_Ev_CL, "rcr Ev,CL", M_CL, RCR, rcr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
}
9658
9659
/**
 * @opmaps grp2_d0
 * @opcode /4
 * @opflclass shift_count
 */
FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_CL, uint8_t, bRm)
{
    /* shl Ev,CL - expands via the shared GRP2_BODY_Ev_CL body macro. */
    IEMOP_MNEMONIC2EX(shl_Ev_CL, "shl Ev,CL", M_CL, SHL, shl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
}
9670
9671
/**
 * @opmaps grp2_d0
 * @opcode /5
 * @opflclass shift_count
 */
FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_CL, uint8_t, bRm)
{
    /* shr Ev,CL - expands via the shared GRP2_BODY_Ev_CL body macro. */
    IEMOP_MNEMONIC2EX(shr_Ev_CL, "shr Ev,CL", M_CL, SHR, shr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
}
9682
9683
/**
 * @opmaps grp2_d0
 * @opcode /7
 * @opflclass shift_count
 */
FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_CL, uint8_t, bRm)
{
    /* sar Ev,CL - arithmetic right shift; expands via the shared GRP2_BODY_Ev_CL macro. */
    IEMOP_MNEMONIC2EX(sar_Ev_CL, "sar Ev,CL", M_CL, SAR, sar, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
}
9694
9695#undef GRP2_BODY_Ev_CL
9696
/**
 * @opcode 0xd3
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    /* Group 2, Ev with shift count in CL: dispatch on ModR/M.reg; /6 is undefined. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_CL, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_CL, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_CL, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_CL, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_CL, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_CL, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_CL, bRm);
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();       /* /6 is undefined in group 2 -> \#UD */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
}
9716
9717
9718/**
9719 * @opcode 0xd4
9720 * @opflmodify cf,pf,af,zf,sf,of
9721 * @opflundef cf,af,of
9722 */
9723FNIEMOP_DEF(iemOp_aam_Ib)
9724{
9725/** @todo testcase: aam */
9726 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
9727 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
9728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9729 IEMOP_HLP_NO_64BIT();
9730 if (!bImm)
9731 IEMOP_RAISE_DIVIDE_ERROR_RET();
9732 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aam, bImm);
9733}
9734
9735
9736/**
9737 * @opcode 0xd5
9738 * @opflmodify cf,pf,af,zf,sf,of
9739 * @opflundef cf,af,of
9740 */
9741FNIEMOP_DEF(iemOp_aad_Ib)
9742{
9743/** @todo testcase: aad? */
9744 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
9745 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
9746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9747 IEMOP_HLP_NO_64BIT();
9748 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aad, bImm);
9749}
9750
9751
9752/**
9753 * @opcode 0xd6
9754 */
9755FNIEMOP_DEF(iemOp_salc)
9756{
9757 IEMOP_MNEMONIC(salc, "salc");
9758 IEMOP_HLP_NO_64BIT();
9759
9760 IEM_MC_BEGIN(0, 0, 0, 0);
9761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9762 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
9763 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
9764 } IEM_MC_ELSE() {
9765 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
9766 } IEM_MC_ENDIF();
9767 IEM_MC_ADVANCE_RIP_AND_FINISH();
9768 IEM_MC_END();
9769}
9770
9771
9772/**
9773 * @opcode 0xd7
9774 */
9775FNIEMOP_DEF(iemOp_xlat)
9776{
9777 IEMOP_MNEMONIC(xlat, "xlat");
9778 switch (pVCpu->iem.s.enmEffAddrMode)
9779 {
9780 case IEMMODE_16BIT:
9781 IEM_MC_BEGIN(2, 0, 0, 0);
9782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9783 IEM_MC_LOCAL(uint8_t, u8Tmp);
9784 IEM_MC_LOCAL(uint16_t, u16Addr);
9785 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
9786 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
9787 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
9788 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9789 IEM_MC_ADVANCE_RIP_AND_FINISH();
9790 IEM_MC_END();
9791 break;
9792
9793 case IEMMODE_32BIT:
9794 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
9795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9796 IEM_MC_LOCAL(uint8_t, u8Tmp);
9797 IEM_MC_LOCAL(uint32_t, u32Addr);
9798 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
9799 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
9800 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
9801 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9802 IEM_MC_ADVANCE_RIP_AND_FINISH();
9803 IEM_MC_END();
9804 break;
9805
9806 case IEMMODE_64BIT:
9807 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
9808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9809 IEM_MC_LOCAL(uint8_t, u8Tmp);
9810 IEM_MC_LOCAL(uint64_t, u64Addr);
9811 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
9812 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
9813 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
9814 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9815 IEM_MC_ADVANCE_RIP_AND_FINISH();
9816 IEM_MC_END();
9817 break;
9818
9819 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9820 }
9821}
9822
9823
9824/**
9825 * Common worker for FPU instructions working on ST0 and STn, and storing the
9826 * result in ST0.
9827 *
9828 * @param bRm Mod R/M byte.
9829 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9830 */
9831FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
9832{
9833 IEM_MC_BEGIN(3, 1, 0, 0);
9834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9835 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9836 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9837 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9838 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9839
9840 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9841 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9842 IEM_MC_PREPARE_FPU_USAGE();
9843 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9844 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
9845 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9846 } IEM_MC_ELSE() {
9847 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9848 } IEM_MC_ENDIF();
9849 IEM_MC_ADVANCE_RIP_AND_FINISH();
9850
9851 IEM_MC_END();
9852}
9853
9854
9855/**
9856 * Common worker for FPU instructions working on ST0 and STn, and only affecting
9857 * flags.
9858 *
9859 * @param bRm Mod R/M byte.
9860 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9861 */
9862FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
9863{
9864 IEM_MC_BEGIN(3, 1, 0, 0);
9865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9866 IEM_MC_LOCAL(uint16_t, u16Fsw);
9867 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9868 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9869 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9870
9871 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9872 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9873 IEM_MC_PREPARE_FPU_USAGE();
9874 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9875 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
9876 IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
9877 } IEM_MC_ELSE() {
9878 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
9879 } IEM_MC_ENDIF();
9880 IEM_MC_ADVANCE_RIP_AND_FINISH();
9881
9882 IEM_MC_END();
9883}
9884
9885
9886/**
9887 * Common worker for FPU instructions working on ST0 and STn, only affecting
9888 * flags, and popping when done.
9889 *
9890 * @param bRm Mod R/M byte.
9891 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9892 */
9893FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
9894{
9895 IEM_MC_BEGIN(3, 1, 0, 0);
9896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9897 IEM_MC_LOCAL(uint16_t, u16Fsw);
9898 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9899 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9900 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9901
9902 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9903 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9904 IEM_MC_PREPARE_FPU_USAGE();
9905 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9906 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
9907 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
9908 } IEM_MC_ELSE() {
9909 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
9910 } IEM_MC_ENDIF();
9911 IEM_MC_ADVANCE_RIP_AND_FINISH();
9912
9913 IEM_MC_END();
9914}
9915
9916
/** Opcode 0xd8 11/0.  FADD ST(0),ST(i) - result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xd8 11/1.  FMUL ST(0),ST(i) - result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xd8 11/2.  FCOM ST(0),ST(i) - flags-only compare, no store. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/3.  FCOMP ST(0),ST(i) - same compare worker as FCOM, but
 *  routed through the popping helper. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/4.  FSUB ST(0),ST(i) - ST(0) = ST(0) - ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xd8 11/5.  FSUBR ST(0),ST(i) - reversed operand order. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xd8 11/6.  FDIV ST(0),ST(i) - ST(0) = ST(0) / ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}


/** Opcode 0xd8 11/7.  FDIVR ST(0),ST(i) - reversed operand order. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
9979
9980
9981/**
9982 * Common worker for FPU instructions working on ST0 and an m32r, and storing
9983 * the result in ST0.
9984 *
9985 * @param bRm Mod R/M byte.
9986 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9987 */
9988FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
9989{
9990 IEM_MC_BEGIN(3, 3, 0, 0);
9991 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9992 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9993 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
9994 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9995 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9996 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
9997
9998 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10000
10001 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10002 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10003 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10004
10005 IEM_MC_PREPARE_FPU_USAGE();
10006 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10007 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
10008 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10009 } IEM_MC_ELSE() {
10010 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10011 } IEM_MC_ENDIF();
10012 IEM_MC_ADVANCE_RIP_AND_FINISH();
10013
10014 IEM_MC_END();
10015}
10016
10017
/** Opcode 0xd8 !11/0.  FADD ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1.  FMUL ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
10032
10033
/** Opcode 0xd8 !11/2.  FCOM ST(0),m32real.
 *
 * Open-coded rather than using iemOpHlpFpu_st0_m32r because the compare only
 * updates FSW (no result register is written), and underflow must record the
 * memory operand for FDP/FDS. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10065
10066
/** Opcode 0xd8 !11/3.  FCOMP ST(0),m32real.
 *
 * Identical to iemOp_fcom_m32r except that the FSW update / underflow
 * handling pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10098
10099
/** Opcode 0xd8 !11/4.  FSUB ST(0),m32real - ST(0) = ST(0) - m32. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5.  FSUBR ST(0),m32real - reversed operand order. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6.  FDIV ST(0),m32real - ST(0) = ST(0) / m32. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7.  FDIVR ST(0),m32real - reversed operand order. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
10130
10131
10132/**
10133 * @opcode 0xd8
10134 */
10135FNIEMOP_DEF(iemOp_EscF0)
10136{
10137 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10138 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
10139
10140 if (IEM_IS_MODRM_REG_MODE(bRm))
10141 {
10142 switch (IEM_GET_MODRM_REG_8(bRm))
10143 {
10144 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
10145 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
10146 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
10147 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
10148 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
10149 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
10150 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
10151 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
10152 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10153 }
10154 }
10155 else
10156 {
10157 switch (IEM_GET_MODRM_REG_8(bRm))
10158 {
10159 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
10160 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
10161 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
10162 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
10163 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
10164 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
10165 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
10166 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
10167 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10168 }
10169 }
10170}
10171
10172
/** Opcode 0xd9 /0 mem32real
 *
 * FLD m32real: converts the 32-bit float and pushes it onto the FPU stack.
 * ST(7) is checked for emptiness since that is the register that becomes the
 * new top-of-stack after the push.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10203
10204
/** Opcode 0xd9 !11/2 mem32real
 *
 * FST m32real: stores ST(0) to memory as a 32-bit float.  The destination is
 * mapped write-only up front; on stack underflow a negative QNaN is written
 * if the invalid-operation exception is masked (FCW.IM), otherwise the
 * mapping is rolled back without writing. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10242
10243
/** Opcode 0xd9 !11/3
 *
 * FSTP m32real: like iemOp_fst_m32r but pops the register stack afterwards
 * (the _THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10281
10282
/** Opcode 0xd9 !11/4
 *
 * FLDENV: loads the 14/28-byte FPU environment; the heavy lifting (layout
 * depends on the effective operand size) is done by iemCImpl_fldenv. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
10300
10301
10302/** Opcode 0xd9 !11/5 */
10303FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
10304{
10305 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
10306 IEM_MC_BEGIN(1, 1, 0, 0);
10307 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10308 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10309
10310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10311 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10312 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10313
10314 IEM_MC_ARG(uint16_t, u16Fsw, 0);
10315 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10316
10317 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, 0, iemCImpl_fldcw, u16Fsw);
10318 IEM_MC_END();
10319}
10320
10321
/** Opcode 0xd9 !11/6
 *
 * FNSTENV: stores the 14/28-byte FPU environment via iemCImpl_fnstenv.
 * NOTE(review): the mnemonic/stats name below says "fstenv" although this is
 * the no-wait form - presumably intentional for stats continuity; confirm. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
10339
10340
/** Opcode 0xd9 !11/7
 *
 * FNSTCW: stores the current FPU control word to a 16-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
10357
10358
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 *
 * FNOP: does nothing except update the FPU opcode/instruction pointer. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     * intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
10374
10375
/** Opcode 0xd9 11/0 stN
 *
 * FLD ST(i): pushes a copy of ST(i) onto the stack (push-underflow if ST(i)
 * is empty). */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10400
10401
/** Opcode 0xd9 11/3 stN
 *
 * FXCH ST(i): exchanges ST(0) and ST(i); C1 is set via the FSW seeded into
 * the result.  The empty-register case is handed to a C implementation. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10430
10431
/** Opcode 0xd9 11/4, 0xdd 11/2.
 *
 * FSTP ST(i): copies ST(0) to ST(i) and pops.  The ST(0) destination is
 * special-cased since "fstp st0,st0" is the de-facto 'ffreep st0' and only
 * needs the pop. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        /* Destination is ST(0) itself: no copy needed, just pop the stack. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* General case: store ST(0) into ST(iDstReg), then pop. */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10478
10479
10480/**
10481 * Common worker for FPU instructions working on ST0 and replaces it with the
10482 * result, i.e. unary operators.
10483 *
10484 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10485 */
10486FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
10487{
10488 IEM_MC_BEGIN(2, 1, 0, 0);
10489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10490 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10491 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10492 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
10493
10494 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10495 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10496 IEM_MC_PREPARE_FPU_USAGE();
10497 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
10498 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
10499 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10500 } IEM_MC_ELSE() {
10501 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10502 } IEM_MC_ENDIF();
10503 IEM_MC_ADVANCE_RIP_AND_FINISH();
10504
10505 IEM_MC_END();
10506}
10507
10508
/** Opcode 0xd9 0xe0.  FCHS: negates the sign of ST(0). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1.  FABS: clears the sign of ST(0). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
10523
10524
/** Opcode 0xd9 0xe4.
 *
 * FTST: compares ST(0) against +0.0, updating only the FSW condition codes. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10548
10549
/** Opcode 0xd9 0xe5.
 *
 * FXAM: classifies ST(0) into the FSW condition codes.  Note that unlike the
 * other ST(0) workers there is no empty-register guard here - FXAM must also
 * classify an empty register, so the register is referenced unconditionally. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10570
10571
10572/**
10573 * Common worker for FPU instructions pushing a constant onto the FPU stack.
10574 *
10575 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10576 */
10577FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
10578{
10579 IEM_MC_BEGIN(1, 1, 0, 0);
10580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10581 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10582 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10583
10584 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10585 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10586 IEM_MC_PREPARE_FPU_USAGE();
10587 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
10588 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
10589 IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
10590 } IEM_MC_ELSE() {
10591 IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
10592 } IEM_MC_ENDIF();
10593 IEM_MC_ADVANCE_RIP_AND_FINISH();
10594
10595 IEM_MC_END();
10596}
10597
10598
/** Opcode 0xd9 0xe8.  FLD1: push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9.  FLDL2T: push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea.  FLDL2E: push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb.  FLDPI: push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec.  FLDLG2: push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed.  FLDLN2: push loge(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee.  FLDZ: push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
10651
10652
/** Opcode 0xd9 0xf0.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
10666
10667
10668/**
10669 * Common worker for FPU instructions working on STn and ST0, storing the result
10670 * in STn, and popping the stack unless IE, DE or ZE was raised.
10671 *
10672 * @param bRm Mod R/M byte.
10673 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10674 */
10675FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
10676{
10677 IEM_MC_BEGIN(3, 1, 0, 0);
10678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10679 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10680 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10681 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10682 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10683
10684 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10685 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10686
10687 IEM_MC_PREPARE_FPU_USAGE();
10688 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
10689 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
10690 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10691 } IEM_MC_ELSE() {
10692 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10693 } IEM_MC_ENDIF();
10694 IEM_MC_ADVANCE_RIP_AND_FINISH();
10695
10696 IEM_MC_END();
10697}
10698
10699
/** Opcode 0xd9 0xf1 - fyl2x: st1 = st1 * log2(st0), then pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
10706
10707
10708/**
10709 * Common worker for FPU instructions working on ST0 and having two outputs, one
10710 * replacing ST0 and one pushed onto the stack.
10711 *
10712 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10713 */
10714FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
10715{
10716 IEM_MC_BEGIN(2, 1, 0, 0);
10717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10718 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
10719 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
10720 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
10721
10722 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10723 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10724 IEM_MC_PREPARE_FPU_USAGE();
10725 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
10726 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
10727 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
10728 } IEM_MC_ELSE() {
10729 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
10730 } IEM_MC_ENDIF();
10731 IEM_MC_ADVANCE_RIP_AND_FINISH();
10732
10733 IEM_MC_END();
10734}
10735
10736
/** Opcode 0xd9 0xf2 - fptan: replaces st0 with tan(st0) and pushes 1.0. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3 - fpatan: st1 = atan(st1/st0), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4 - fxtract: splits st0 into exponent (st0) and significand (pushed). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5 - fprem1: IEEE partial remainder of st0/st1 into st0. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
10767
10768
/** Opcode 0xd9 0xf6 - fdecstp: decrements the FPU stack top pointer (TOP). */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    /* FSW update with 0: clears C0/C2/C3 (and C1) per the note above. */
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0xd9 0xf7 - fincstp: increments the FPU stack top pointer (TOP). */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    /* FSW update with 0: clears C0/C2/C3 (and C1) per the note above. */
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10811
10812
/** Opcode 0xd9 0xf8 - fprem: partial remainder (truncating) of st0/st1 into st0. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}


/** Opcode 0xd9 0xf9 - fyl2xp1: st1 = st1 * log2(st0 + 1.0), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}


/** Opcode 0xd9 0xfa - fsqrt: st0 = sqrt(st0). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}


/** Opcode 0xd9 0xfb - fsincos: st0 = sin(st0), cos pushed on top. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}


/** Opcode 0xd9 0xfc - frndint: rounds st0 to integer per current FCW mode. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}


/** Opcode 0xd9 0xfd - fscale: st0 = st0 * 2^trunc(st1). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}


/** Opcode 0xd9 0xfe - fsin: st0 = sin(st0). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}


/** Opcode 0xd9 0xff - fcos: st0 = cos(st0). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
10875
10876
/** Used by iemOp_EscF1.
 * Dispatch table for the 0xd9 escape byte's register-form encodings
 * 0xe0 thru 0xff (indexed by bRm - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
10913
10914
10915/**
10916 * @opcode 0xd9
10917 */
10918FNIEMOP_DEF(iemOp_EscF1)
10919{
10920 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10921 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
10922
10923 if (IEM_IS_MODRM_REG_MODE(bRm))
10924 {
10925 switch (IEM_GET_MODRM_REG_8(bRm))
10926 {
10927 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
10928 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
10929 case 2:
10930 if (bRm == 0xd0)
10931 return FNIEMOP_CALL(iemOp_fnop);
10932 IEMOP_RAISE_INVALID_OPCODE_RET();
10933 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
10934 case 4:
10935 case 5:
10936 case 6:
10937 case 7:
10938 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
10939 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
10940 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10941 }
10942 }
10943 else
10944 {
10945 switch (IEM_GET_MODRM_REG_8(bRm))
10946 {
10947 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
10948 case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
10949 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
10950 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
10951 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
10952 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
10953 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
10954 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
10955 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10956 }
10957 }
10958}
10959
10960
/** Opcode 0xda 11/0 - fcmovb: copy ST(i) to ST0 when CF=1 (below). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST0 must be valid; only ST(i)'s value is referenced. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated whether or not the move happened. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xda 11/1 - fcmove: copy ST(i) to ST0 when ZF=1 (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xda 11/2 - fcmovbe: copy ST(i) to ST0 when CF=1 or ZF=1 (below/equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xda 11/3 - fcmovu: copy ST(i) to ST0 when PF=1 (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11063
11064
11065/**
11066 * Common worker for FPU instructions working on ST0 and ST1, only affecting
11067 * flags, and popping twice when done.
11068 *
11069 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11070 */
11071FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
11072{
11073 IEM_MC_BEGIN(3, 1, 0, 0);
11074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11075 IEM_MC_LOCAL(uint16_t, u16Fsw);
11076 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11077 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11078 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
11079
11080 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11081 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11082
11083 IEM_MC_PREPARE_FPU_USAGE();
11084 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
11085 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
11086 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
11087 } IEM_MC_ELSE() {
11088 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
11089 } IEM_MC_ENDIF();
11090 IEM_MC_ADVANCE_RIP_AND_FINISH();
11091
11092 IEM_MC_END();
11093}
11094
11095
/** Opcode 0xda 0xe9 - fucompp: unordered compare st0 with st1, pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
11102
11103
11104/**
11105 * Common worker for FPU instructions working on ST0 and an m32i, and storing
11106 * the result in ST0.
11107 *
11108 * @param bRm Mod R/M byte.
11109 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11110 */
11111FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
11112{
11113 IEM_MC_BEGIN(3, 3, 0, 0);
11114 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11115 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11116 IEM_MC_LOCAL(int32_t, i32Val2);
11117 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11118 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11119 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
11120
11121 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11123
11124 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11125 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11126 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11127
11128 IEM_MC_PREPARE_FPU_USAGE();
11129 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11130 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
11131 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
11132 } IEM_MC_ELSE() {
11133 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11134 } IEM_MC_ENDIF();
11135 IEM_MC_ADVANCE_RIP_AND_FINISH();
11136
11137 IEM_MC_END();
11138}
11139
11140
/** Opcode 0xda !11/0 - fiadd: st0 = st0 + m32i. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}


/** Opcode 0xda !11/1 - fimul: st0 = st0 * m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
11155
11156
/** Opcode 0xda !11/2 - ficom: compare st0 with m32i, setting FSW flags only. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_LOCAL(int32_t,                 i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2,   i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* FSW update also records the data pointer (FDP) for the memory operand. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11188
11189
/** Opcode 0xda !11/3 - ficomp: compare st0 with m32i, set FSW flags, pop. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_LOCAL(int32_t,                 i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2,   i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        /* Same comparison worker as FICOM; only the pop afterwards differs. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11221
11222
/** Opcode 0xda !11/4 - fisub: st0 = st0 - m32i. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}


/** Opcode 0xda !11/5 - fisubr: st0 = m32i - st0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}


/** Opcode 0xda !11/6 - fidiv: st0 = st0 / m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}


/** Opcode 0xda !11/7 - fidivr: st0 = m32i / st0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
11253
11254
11255/**
11256 * @opcode 0xda
11257 */
11258FNIEMOP_DEF(iemOp_EscF2)
11259{
11260 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11261 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
11262 if (IEM_IS_MODRM_REG_MODE(bRm))
11263 {
11264 switch (IEM_GET_MODRM_REG_8(bRm))
11265 {
11266 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
11267 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
11268 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
11269 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
11270 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
11271 case 5:
11272 if (bRm == 0xe9)
11273 return FNIEMOP_CALL(iemOp_fucompp);
11274 IEMOP_RAISE_INVALID_OPCODE_RET();
11275 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11276 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11277 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11278 }
11279 }
11280 else
11281 {
11282 switch (IEM_GET_MODRM_REG_8(bRm))
11283 {
11284 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
11285 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
11286 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
11287 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
11288 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
11289 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
11290 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
11291 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
11292 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11293 }
11294 }
11295}
11296
11297
/** Opcode 0xdb !11/0 - fild: convert m32i to R80 and push it. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,            FpuRes);
    IEM_MC_LOCAL(int32_t,                 i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,   pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST7 (the new stack top after the push) must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11328
11329
/** Opcode 0xdb !11/1 - fisttp: store st0 to m32i with truncation, pop (SSE3). */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* The destination is mapped for writing before we know whether ST0 is valid. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit is conditional on the FSW the worker produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: with IM masked, store the integer indefinite; else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11367
11368
/** Opcode 0xdb !11/2 - fist: store st0 to m32i using the FCW rounding mode. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        /* No pop: this is FIST, not FISTP. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: with IM masked, store the integer indefinite; else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11406
11407
/** Opcode 0xdb !11/3 - fistp: store st0 to m32i using the FCW rounding mode, pop. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    /* NOTE(review): local count (2) differs from the otherwise identical
     *  fist_m32i which uses IEM_MC_BEGIN(3, 3, 0, 0) — confirm intended. */
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: with IM masked, store the integer indefinite; else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11445
11446
/** Opcode 0xdb !11/5 - fld: push an 80-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U,            r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U,  pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST7 (the new stack top after the push) must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11477
11478
/** Opcode 0xdb !11/7 - fstp: store st0 to m80r and pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* The destination is mapped for writing before we know whether ST0 is valid. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_MEM_MAP_R80_WO(pr80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: with IM masked, store the real indefinite (-QNaN); else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11516
11517
11518/** Opcode 0xdb 11/0. */
11519FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
11520{
11521 IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
11522 IEM_MC_BEGIN(0, 1, 0, 0);
11523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11524 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
11525
11526 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11527 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11528
11529 IEM_MC_PREPARE_FPU_USAGE();
11530 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
11531 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
11532 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
11533 } IEM_MC_ENDIF();
11534 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
11535 } IEM_MC_ELSE() {
11536 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11537 } IEM_MC_ENDIF();
11538 IEM_MC_ADVANCE_RIP_AND_FINISH();
11539
11540 IEM_MC_END();
11541}
11542
11543
11544 /** Opcode 0xdb 11/1.
 * FCMOVNE st0,stN - copies ST(i) into ST(0) when EFLAGS.ZF is clear ("not
 * equal"); signals FPU stack underflow when either register is empty. */
11545 FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
11546 {
11547     IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
11548     IEM_MC_BEGIN(0, 1, 0, 0);
11549     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11550     IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
11551 
11552     IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11553     IEM_MC_MAYBE_RAISE_FPU_XCPT();
11554 
11555     IEM_MC_PREPARE_FPU_USAGE();
11556     IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
11557         IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { /* move only when "not equal" */
11558             IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
11559         } IEM_MC_ENDIF();
11560         IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
11561     } IEM_MC_ELSE() {
11562         IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11563     } IEM_MC_ENDIF();
11564     IEM_MC_ADVANCE_RIP_AND_FINISH();
11565 
11566     IEM_MC_END();
11567 }
11568
11569
11570 /** Opcode 0xdb 11/2.
 * FCMOVNBE st0,stN - copies ST(i) into ST(0) when both CF and ZF are clear
 * ("not below or equal"); stack underflow when either register is empty. */
11571 FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
11572 {
11573     IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
11574     IEM_MC_BEGIN(0, 1, 0, 0);
11575     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11576     IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
11577 
11578     IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11579     IEM_MC_MAYBE_RAISE_FPU_XCPT();
11580 
11581     IEM_MC_PREPARE_FPU_USAGE();
11582     IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
11583         IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) { /* move only when "not below or equal" */
11584             IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
11585         } IEM_MC_ENDIF();
11586         IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
11587     } IEM_MC_ELSE() {
11588         IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11589     } IEM_MC_ENDIF();
11590     IEM_MC_ADVANCE_RIP_AND_FINISH();
11591 
11592     IEM_MC_END();
11593 }
11594
11595
11596 /** Opcode 0xdb 11/3.
 * FCMOVNU st0,stN - copies ST(i) into ST(0) when EFLAGS.PF is clear ("not
 * unordered"); signals FPU stack underflow when either register is empty. */
11597 FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
11598 {
11599     IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
11600     IEM_MC_BEGIN(0, 1, 0, 0);
11601     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11602     IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
11603 
11604     IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11605     IEM_MC_MAYBE_RAISE_FPU_XCPT();
11606 
11607     IEM_MC_PREPARE_FPU_USAGE();
11608     IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
11609         IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) { /* PF set = unordered compare result */
11610             IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
11611         } IEM_MC_ENDIF();
11612         IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
11613     } IEM_MC_ELSE() {
11614         IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11615     } IEM_MC_ENDIF();
11616     IEM_MC_ADVANCE_RIP_AND_FINISH();
11617 
11618     IEM_MC_END();
11619 }
11620
11621
11622 /** Opcode 0xdb 0xe0.
 * FNENI - 8087 interrupt-enable relic; only the device-not-available check is
 * performed, otherwise the instruction is a no-op on later FPUs. */
11623 FNIEMOP_DEF(iemOp_fneni)
11624 {
11625     IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
11626     IEM_MC_BEGIN(0, 0, 0, 0);
11627     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11628     IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11629     IEM_MC_ADVANCE_RIP_AND_FINISH();
11630     IEM_MC_END();
11631 }
11632
11633
11634 /** Opcode 0xdb 0xe1.
 * FNDISI - 8087 interrupt-disable relic; only the device-not-available check
 * is performed, otherwise the instruction is a no-op on later FPUs. */
11635 FNIEMOP_DEF(iemOp_fndisi)
11636 {
11637     IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
11638     IEM_MC_BEGIN(0, 0, 0, 0);
11639     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11640     IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11641     IEM_MC_ADVANCE_RIP_AND_FINISH();
11642     IEM_MC_END();
11643 }
11644
11645
11646 /** Opcode 0xdb 0xe2.
 * FNCLEX - clears the FPU exception status bits in FSW without first checking
 * for pending unmasked exceptions (the "no-wait" form). */
11647 FNIEMOP_DEF(iemOp_fnclex)
11648 {
11649     IEMOP_MNEMONIC(fnclex, "fnclex");
11650     IEM_MC_BEGIN(0, 0, 0, 0);
11651     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11652     IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11653     IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
11654     IEM_MC_CLEAR_FSW_EX();
11655     IEM_MC_ADVANCE_RIP_AND_FINISH();
11656     IEM_MC_END();
11657 }
11658
11659
11660 /** Opcode 0xdb 0xe3.
 * FNINIT - reinitializes the FPU; deferred to the C implementation with
 * fCheckXcpts=false, i.e. the no-wait form that ignores pending exceptions. */
11661 FNIEMOP_DEF(iemOp_fninit)
11662 {
11663     IEMOP_MNEMONIC(fninit, "fninit");
11664     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11665     IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, 0, iemCImpl_finit, false /*fCheckXcpts*/);
11666 }
11667
11668
11669 /** Opcode 0xdb 0xe4.
 * FNSETPM - 80287 "set protected mode" relic; only the device-not-available
 * check is performed, otherwise ignored on later FPUs. */
11670 FNIEMOP_DEF(iemOp_fnsetpm)
11671 {
11672     IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
11673     IEM_MC_BEGIN(0, 0, 0, 0);
11674     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11675     IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11676     IEM_MC_ADVANCE_RIP_AND_FINISH();
11677     IEM_MC_END();
11678 }
11679
11680
11681 /** Opcode 0xdb 0xe5.
 * FRSTPM - 80287XL "return to real mode" relic; we follow newer CPUs and
 * raise \#UD (the ignore-it variant is kept disabled below). */
11682 FNIEMOP_DEF(iemOp_frstpm)
11683 {
11684     IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
11685 #if 0 /* #UDs on newer CPUs */
11686     IEM_MC_BEGIN(0, 0, 0, 0);
11687     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11688     IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11689     IEM_MC_ADVANCE_RIP_AND_FINISH();
11690     IEM_MC_END();
11691     return VINF_SUCCESS;
11692 #else
11693     IEMOP_RAISE_INVALID_OPCODE_RET();
11694 #endif
11695 }
11696
11697
11698 /** Opcode 0xdb 11/5.
 * FUCOMI st0,stN - unordered compare of ST(0) with ST(i), results in
 * ZF/PF/CF; no pop. Deferred to the C implementation shared with FCOMI. */
11699 FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
11700 {
11701     IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
11702     IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
11703                                 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
11704                                 0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
11705 }
11706
11707
11708 /** Opcode 0xdb 11/6.
 * FCOMI st0,stN - ordered compare of ST(0) with ST(i), results in ZF/PF/CF;
 * no pop. Deferred to the C implementation shared with FUCOMI. */
11709 FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
11710 {
11711     IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
11712     IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
11713                                 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
11714                                 0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode); /* 0 like iemOp_fucomi_stN: the flag is OR'ed into the opcode word, so a plain 0 is clearer than 'false'. */
11715 }
11716
11717
11718/**
11719 * @opcode 0xdb
11720 */
11721FNIEMOP_DEF(iemOp_EscF3)
11722{
11723 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11724 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
11725 if (IEM_IS_MODRM_REG_MODE(bRm))
11726 {
11727 switch (IEM_GET_MODRM_REG_8(bRm))
11728 {
11729 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
11730 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
11731 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
11732 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
11733 case 4:
11734 switch (bRm)
11735 {
11736 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
11737 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
11738 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
11739 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
11740 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
11741 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
11742 case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
11743 case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
11744 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11745 }
11746 break;
11747 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
11748 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
11749 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11750 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11751 }
11752 }
11753 else
11754 {
11755 switch (IEM_GET_MODRM_REG_8(bRm))
11756 {
11757 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
11758 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
11759 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
11760 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
11761 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
11762 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
11763 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11764 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
11765 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11766 }
11767 }
11768}
11769
11770
11771/**
11772 * Common worker for FPU instructions working on STn and ST0, and storing the
11773 * result in STn unless IE, DE or ZE was raised.
11774 *
11775 * @param bRm Mod R/M byte.
11776 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11777 */
11778FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
11779{
11780 IEM_MC_BEGIN(3, 1, 0, 0);
11781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11782 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11783 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11784 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11785 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
11786
11787 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11788 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11789
11790 IEM_MC_PREPARE_FPU_USAGE();
11791 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
11792 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
11793 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
11794 } IEM_MC_ELSE() {
11795 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
11796 } IEM_MC_ENDIF();
11797 IEM_MC_ADVANCE_RIP_AND_FINISH();
11798
11799 IEM_MC_END();
11800}
11801
11802
11803 /** Opcode 0xdc 11/0 - FADD ST(i),ST(0): ST(i) = ST(i) + ST(0), no pop. */
11804 FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
11805 {
11806     IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
11807     return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
11808 }
11809
11810
11811 /** Opcode 0xdc 11/1 - FMUL ST(i),ST(0): ST(i) = ST(i) * ST(0), no pop. */
11812 FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
11813 {
11814     IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
11815     return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
11816 }
11817
11818
11819 /** Opcode 0xdc 11/4 - FSUBR ST(i),ST(0): ST(i) = ST(0) - ST(i), no pop. */
11820 FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
11821 {
11822     IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
11823     return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
11824 }
11825
11826
11827 /** Opcode 0xdc 11/5 - FSUB ST(i),ST(0): ST(i) = ST(i) - ST(0), no pop. */
11828 FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
11829 {
11830     IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
11831     return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
11832 }
11833
11834
11835 /** Opcode 0xdc 11/6 - FDIVR ST(i),ST(0): ST(i) = ST(0) / ST(i), no pop. */
11836 FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
11837 {
11838     IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
11839     return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
11840 }
11841
11842
11843 /** Opcode 0xdc 11/7 - FDIV ST(i),ST(0): ST(i) = ST(i) / ST(0), no pop. */
11844 FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
11845 {
11846     IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
11847     return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
11848 }
11849
11850
11851/**
11852 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
11853 * memory operand, and storing the result in ST0.
11854 *
11855 * @param bRm Mod R/M byte.
11856 * @param pfnImpl Pointer to the instruction implementation (assembly).
11857 */
11858FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
11859{
11860 IEM_MC_BEGIN(3, 3, 0, 0);
11861 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11862 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11863 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
11864 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11865 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
11866 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
11867
11868 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11870 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11871 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11872
11873 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11874 IEM_MC_PREPARE_FPU_USAGE();
11875 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
11876 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
11877 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11878 } IEM_MC_ELSE() {
11879 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11880 } IEM_MC_ENDIF();
11881 IEM_MC_ADVANCE_RIP_AND_FINISH();
11882
11883 IEM_MC_END();
11884}
11885
11886
11887 /** Opcode 0xdc !11/0 - FADD m64real: ST(0) = ST(0) + m64. */
11888 FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
11889 {
11890     IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
11891     return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
11892 }
11893
11894
11895 /** Opcode 0xdc !11/1 - FMUL m64real: ST(0) = ST(0) * m64. */
11896 FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
11897 {
11898     IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
11899     return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
11900 }
11901
11902
11903 /** Opcode 0xdc !11/2.
 * FCOM st0,m64real - compares ST(0) with the m64 operand, updating the FSW
 * condition codes; no pop. Underflow is signalled when ST(0) is empty. */
11904 FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
11905 {
11906     IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");
11907 
11908     IEM_MC_BEGIN(3, 3, 0, 0);
11909     IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11910     IEM_MC_LOCAL(uint16_t, u16Fsw);
11911     IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
11912     IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11913     IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11914     IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
11915 
11916     IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11917     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11918 
11919     IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11920     IEM_MC_MAYBE_RAISE_FPU_XCPT();
11921     IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11922 
11923     IEM_MC_PREPARE_FPU_USAGE();
11924     IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11925         IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
11926         IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11927     } IEM_MC_ELSE() {
11928         IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11929     } IEM_MC_ENDIF();
11930     IEM_MC_ADVANCE_RIP_AND_FINISH();
11931 
11932     IEM_MC_END();
11933 }
11934
11935
11936 /** Opcode 0xdc !11/3.
 * FCOMP st0,m64real - same compare as FCOM m64r but pops ST(0) afterwards. */
11937 FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
11938 {
11939     IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");
11940 
11941     IEM_MC_BEGIN(3, 3, 0, 0);
11942     IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11943     IEM_MC_LOCAL(uint16_t, u16Fsw);
11944     IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
11945     IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11946     IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11947     IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
11948 
11949     IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11950     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11951 
11952     IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11953     IEM_MC_MAYBE_RAISE_FPU_XCPT();
11954     IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11955 
11956     IEM_MC_PREPARE_FPU_USAGE();
11957     IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11958         IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
11959         IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11960     } IEM_MC_ELSE() {
11961         IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11962     } IEM_MC_ENDIF();
11963     IEM_MC_ADVANCE_RIP_AND_FINISH();
11964 
11965     IEM_MC_END();
11966 }
11967
11968
11969 /** Opcode 0xdc !11/4 - FSUB m64real: ST(0) = ST(0) - m64. */
11970 FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
11971 {
11972     IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
11973     return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
11974 }
11975
11976
11977 /** Opcode 0xdc !11/5 - FSUBR m64real: ST(0) = m64 - ST(0). */
11978 FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
11979 {
11980     IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
11981     return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
11982 }
11983
11984
11985 /** Opcode 0xdc !11/6 - FDIV m64real: ST(0) = ST(0) / m64. */
11986 FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
11987 {
11988     IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
11989     return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
11990 }
11991
11992
11993 /** Opcode 0xdc !11/7 - FDIVR m64real: ST(0) = m64 / ST(0). */
11994 FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
11995 {
11996     IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
11997     return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
11998 }
11999
12000
12001/**
12002 * @opcode 0xdc
12003 */
12004FNIEMOP_DEF(iemOp_EscF4)
12005{
12006 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12007 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
12008 if (IEM_IS_MODRM_REG_MODE(bRm))
12009 {
12010 switch (IEM_GET_MODRM_REG_8(bRm))
12011 {
12012 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
12013 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
12014 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
12015 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
12016 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
12017 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
12018 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
12019 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
12020 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12021 }
12022 }
12023 else
12024 {
12025 switch (IEM_GET_MODRM_REG_8(bRm))
12026 {
12027 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
12028 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
12029 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
12030 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
12031 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
12032 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
12033 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
12034 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
12035 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12036 }
12037 }
12038}
12039
12040
12041 /** Opcode 0xdd !11/0.
12042  * FLD m64real - pushes the 64-bit real onto the FPU stack (converted to R80);
12043  * signals stack overflow when ST(7) is occupied.
12044  * @sa iemOp_fld_m32r */
12043 FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
12044 {
12045     IEMOP_MNEMONIC(fld_m64r, "fld m64r");
12046 
12047     IEM_MC_BEGIN(2, 3, 0, 0);
12048     IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12049     IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
12050     IEM_MC_LOCAL(RTFLOAT64U, r64Val);
12051     IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
12052     IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);
12053 
12054     IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12055     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12056     IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12057     IEM_MC_MAYBE_RAISE_FPU_XCPT();
12058 
12059     IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12060     IEM_MC_PREPARE_FPU_USAGE();
12061     IEM_MC_IF_FPUREG_IS_EMPTY(7) { /* ST(7) free means there is room to push */
12062         IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
12063         IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12064     } IEM_MC_ELSE() {
12065         IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12066     } IEM_MC_ENDIF();
12067     IEM_MC_ADVANCE_RIP_AND_FINISH();
12068 
12069     IEM_MC_END();
12070 }
12071
12072
12073 /** Opcode 0xdd !11/1 (comment previously said !11/0; dispatch maps this to /1).
 * FISTTP m64int - stores ST(0) truncated to int64 and pops; when ST(0) is
 * empty and IM is masked, the integer indefinite (INT64_MIN) is stored. */
12074 FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
12075 {
12076     IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
12077     IEM_MC_BEGIN(3, 3, 0, 0);
12078     IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12079     IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12080 
12081     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12082     IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12083     IEM_MC_MAYBE_RAISE_FPU_XCPT();
12084     IEM_MC_PREPARE_FPU_USAGE();
12085 
12086     IEM_MC_LOCAL(uint8_t, bUnmapInfo);
12087     IEM_MC_ARG(int64_t *, pi64Dst, 1);
12088     IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
12089 
12090     IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
12091     IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
12092         IEM_MC_LOCAL(uint16_t, u16Fsw);
12093         IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
12094         IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
12095         IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
12096         IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12097     } IEM_MC_ELSE() {
12098         IEM_MC_IF_FCW_IM() {
12099             IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
12100             IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
12101         } IEM_MC_ELSE() {
12102             IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
12103         } IEM_MC_ENDIF();
12104         IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12105     } IEM_MC_ENDIF();
12106     IEM_MC_ADVANCE_RIP_AND_FINISH();
12107 
12108     IEM_MC_END();
12109 }
12110
12111
12112 /** Opcode 0xdd !11/2 (comment previously said !11/0; dispatch maps this to /2).
 * FST m64real - stores ST(0) as 64-bit real, no pop; when ST(0) is empty and
 * IM is masked, a negative QNaN is stored instead. */
12113 FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
12114 {
12115     IEMOP_MNEMONIC(fst_m64r, "fst m64r");
12116     IEM_MC_BEGIN(3, 3, 0, 0);
12117     IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12118     IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12119 
12120     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12121     IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12122     IEM_MC_MAYBE_RAISE_FPU_XCPT();
12123     IEM_MC_PREPARE_FPU_USAGE();
12124 
12125     IEM_MC_LOCAL(uint8_t, bUnmapInfo);
12126     IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
12127     IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
12128 
12129     IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
12130     IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
12131         IEM_MC_LOCAL(uint16_t, u16Fsw);
12132         IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
12133         IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
12134         IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
12135         IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12136     } IEM_MC_ELSE() {
12137         IEM_MC_IF_FCW_IM() {
12138             IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
12139             IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
12140         } IEM_MC_ELSE() {
12141             IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
12142         } IEM_MC_ENDIF();
12143         IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12144     } IEM_MC_ENDIF();
12145     IEM_MC_ADVANCE_RIP_AND_FINISH();
12146 
12147     IEM_MC_END();
12148 }
12149
12150
12151
12152
12153 /** Opcode 0xdd !11/3 (comment previously said !11/0; dispatch maps this to /3).
 * FSTP m64real - stores ST(0) as 64-bit real and pops; when ST(0) is empty
 * and IM is masked, a negative QNaN is stored instead. */
12154 FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
12155 {
12156     IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
12157     IEM_MC_BEGIN(3, 3, 0, 0);
12158     IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12159     IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12160 
12161     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12162     IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12163     IEM_MC_MAYBE_RAISE_FPU_XCPT();
12164     IEM_MC_PREPARE_FPU_USAGE();
12165 
12166     IEM_MC_LOCAL(uint8_t, bUnmapInfo);
12167     IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
12168     IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
12169 
12170     IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
12171     IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
12172         IEM_MC_LOCAL(uint16_t, u16Fsw);
12173         IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
12174         IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
12175         IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
12176         IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12177     } IEM_MC_ELSE() {
12178         IEM_MC_IF_FCW_IM() {
12179             IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
12180             IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
12181         } IEM_MC_ELSE() {
12182             IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
12183         } IEM_MC_ENDIF();
12184         IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12185     } IEM_MC_ENDIF();
12186     IEM_MC_ADVANCE_RIP_AND_FINISH();
12187 
12188     IEM_MC_END();
12189 }
12190
12191
12192 /** Opcode 0xdd !11/4 (comment previously said !11/0; dispatch maps this to /4).
 * FRSTOR m94/108byte - restores the full FPU state from memory; deferred to
 * the C implementation. */
12193 FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
12194 {
12195     IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
12196     IEM_MC_BEGIN(3, 0, 0, 0);
12197     IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
12198     IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12199 
12200     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12201     IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12202     IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12203 
12204     IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
12205     IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
12206     IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
12207     IEM_MC_END();
12208 }
12209
12210
12211 /** Opcode 0xdd !11/6 (comment previously said !11/0; dispatch maps this to /6).
 * FNSAVE m94/108byte - saves the FPU state to memory and then reinitializes
 * the FPU (implicit FNINIT); deferred to the C implementation. */
12212 FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
12213 {
12214     IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
12215     IEM_MC_BEGIN(3, 0, 0, 0);
12216     IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
12217     IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12218 
12219     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12220     IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12221     IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */
12222 
12223     IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
12224     IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
12225     IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
12226     IEM_MC_END();
12227 }
12228
12229 /** Opcode 0xdd !11/7 (comment previously said !11/0; dispatch maps this to /7).
 * FNSTSW m16 - stores the FPU status word to memory without checking for
 * pending exceptions. */
12230 FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
12231 {
12232     IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");
12233 
12234     IEM_MC_BEGIN(0, 2, 0, 0);
12235     IEM_MC_LOCAL(uint16_t, u16Tmp);
12236     IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12237 
12238     IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12239     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12240     IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12241 
12242     IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
12243     IEM_MC_FETCH_FSW(u16Tmp);
12244     IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
12245     IEM_MC_ADVANCE_RIP_AND_FINISH();
12246 
12247 /** @todo Debug / drop a hint to the verifier that things may differ
12248  * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
12249  * NT4SP1. (X86_FSW_PE) */
12250     IEM_MC_END();
12251 }
12252
12253
12254 /** Opcode 0xdd 11/0.
 * FFREE ST(i) - marks the register as empty in the FPU tag word. */
12255 FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
12256 {
12257     IEMOP_MNEMONIC(ffree_stN, "ffree stN");
12258     /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
12259              unmodified. */
12260     IEM_MC_BEGIN(0, 0, 0, 0);
12261     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12262 
12263     IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12264     IEM_MC_MAYBE_RAISE_FPU_XCPT();
12265 
12266     IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12267     IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
12268     IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
12269 
12270     IEM_MC_ADVANCE_RIP_AND_FINISH();
12271     IEM_MC_END();
12272 }
12273
12274
12275 /** Opcode 0xdd 11/2 (comment previously said 11/1; dispatch maps this to /2).
 * FST ST(i) - copies ST(0) into ST(i), no pop; signals stack underflow when
 * ST(0) is empty. */
12276 FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
12277 {
12278     IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
12279     IEM_MC_BEGIN(0, 2, 0, 0);
12280     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12281     IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
12282     IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
12283     IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12284     IEM_MC_MAYBE_RAISE_FPU_XCPT();
12285 
12286     IEM_MC_PREPARE_FPU_USAGE();
12287     IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
12288         IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
12289         IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
12290     } IEM_MC_ELSE() {
12291         IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
12292     } IEM_MC_ENDIF();
12293 
12294     IEM_MC_ADVANCE_RIP_AND_FINISH();
12295     IEM_MC_END();
12296 }
12297
12298
12299 /** Opcode 0xdd 11/4 (comment previously said 11/3; dispatch maps this to /4).
 * FUCOM ST(0),ST(i) - unordered compare, updates FSW condition codes, no pop. */
12300 FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
12301 {
12302     IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
12303     return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
12304 }
12305
12306
12307 /** Opcode 0xdd 11/5 (comment previously said 11/4; dispatch maps this to /5).
 * FUCOMP ST(0),ST(i) - unordered compare, updates FSW condition codes, pops. */
12308 FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
12309 {
12310     IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
12311     return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
12312 }
12313
12314
12315/**
12316 * @opcode 0xdd
12317 */
12318FNIEMOP_DEF(iemOp_EscF5)
12319{
12320 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12321 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
12322 if (IEM_IS_MODRM_REG_MODE(bRm))
12323 {
12324 switch (IEM_GET_MODRM_REG_8(bRm))
12325 {
12326 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
12327 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
12328 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
12329 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
12330 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
12331 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
12332 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
12333 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
12334 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12335 }
12336 }
12337 else
12338 {
12339 switch (IEM_GET_MODRM_REG_8(bRm))
12340 {
12341 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
12342 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
12343 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
12344 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
12345 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
12346 case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
12347 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
12348 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
12349 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12350 }
12351 }
12352}
12353
12354
12355 /** Opcode 0xde 11/0 - FADDP ST(i),ST(0): ST(i) = ST(i) + ST(0), then pop. */
12356 FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
12357 {
12358     IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
12359     return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
12360 }
12361
12362
12363 /** Opcode 0xde 11/1 - FMULP ST(i),ST(0): ST(i) = ST(i) * ST(0), then pop.
 * (Comment previously said 11/0, which is FADDP; FMULP is /1 per the SDM.) */
12364 FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
12365 {
12366     IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
12367     return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
12368 }
12369
12370
12371 /** Opcode 0xde 0xd9 - FCOMPP: compares ST(0) with ST(1), then pops both. */
12372 FNIEMOP_DEF(iemOp_fcompp)
12373 {
12374     IEMOP_MNEMONIC(fcompp, "fcompp");
12375     return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
12376 }
12377
12378
12379 /** Opcode 0xde 11/4 - FSUBRP ST(i),ST(0): ST(i) = ST(0) - ST(i), then pop. */
12380 FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
12381 {
12382     IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
12383     return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
12384 }
12385
12386
12387 /** Opcode 0xde 11/5 - FSUBP ST(i),ST(0): ST(i) = ST(i) - ST(0), then pop. */
12388 FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
12389 {
12390     IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
12391     return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
12392 }
12393
12394
12395 /** Opcode 0xde 11/6 - FDIVRP ST(i),ST(0): ST(i) = ST(0) / ST(i), then pop. */
12396 FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
12397 {
12398     IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
12399     return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
12400 }
12401
12402
12403 /** Opcode 0xde 11/7 - FDIVP ST(i),ST(0): ST(i) = ST(i) / ST(0), then pop. */
12404 FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
12405 {
12406     IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
12407     return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
12408 }
12409
12410
12411/**
12412 * Common worker for FPU instructions working on ST0 and an m16i, and storing
12413 * the result in ST0.
12414 *
12415 * @param bRm Mod R/M byte.
12416 * @param pfnAImpl Pointer to the instruction implementation (assembly).
12417 */
12418FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
12419{
12420 IEM_MC_BEGIN(3, 3, 0, 0);
12421 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12422 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
12423 IEM_MC_LOCAL(int16_t, i16Val2);
12424 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
12425 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12426 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
12427
12428 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12430
12431 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12432 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12433 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12434
12435 IEM_MC_PREPARE_FPU_USAGE();
12436 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
12437 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
12438 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
12439 } IEM_MC_ELSE() {
12440 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
12441 } IEM_MC_ENDIF();
12442 IEM_MC_ADVANCE_RIP_AND_FINISH();
12443
12444 IEM_MC_END();
12445}
12446
12447
12448 /** Opcode 0xde !11/0 - FIADD m16int: ST(0) = ST(0) + m16i. */
12449 FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
12450 {
12451     IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
12452     return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
12453 }
12454
12455
12456 /** Opcode 0xde !11/1 - FIMUL m16int: ST(0) = ST(0) * m16i. */
12457 FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
12458 {
12459     IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
12460     return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
12461 }
12462
12463
12464 /** Opcode 0xde !11/2.
 * FICOM st0,m16int - compares ST(0) with the 16-bit integer operand, updating
 * the FSW condition codes; no pop. Underflow when ST(0) is empty. */
12465 FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
12466 {
12467     IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");
12468 
12469     IEM_MC_BEGIN(3, 3, 0, 0);
12470     IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12471     IEM_MC_LOCAL(uint16_t, u16Fsw);
12472     IEM_MC_LOCAL(int16_t, i16Val2);
12473     IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12474     IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12475     IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
12476 
12477     IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12478     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12479 
12480     IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12481     IEM_MC_MAYBE_RAISE_FPU_XCPT();
12482     IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12483 
12484     IEM_MC_PREPARE_FPU_USAGE();
12485     IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
12486         IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
12487         IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12488     } IEM_MC_ELSE() {
12489         IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12490     } IEM_MC_ENDIF();
12491     IEM_MC_ADVANCE_RIP_AND_FINISH();
12492 
12493     IEM_MC_END();
12494 }
12495
12496
/** Opcode 0xde !11/3.
 * FICOMP m16int: like FICOM m16int above, but pops ST0 afterwards
 * (note the *_THEN_POP variants of the FSW/underflow updaters). */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                       GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,                      u16Fsw);
    IEM_MC_LOCAL(int16_t,                       i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,            pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,                    pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,       pi16Val2,       i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12528
12529
/** Opcode 0xde !11/4.
 * FISUB m16int: ST0 = ST0 - (int16_t)mem. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
12536
12537
/** Opcode 0xde !11/5.
 * FISUBR m16int: reversed subtract, ST0 = (int16_t)mem - ST0. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
12544
12545
/** Opcode 0xde !11/6.
 * FIDIV m16int: ST0 = ST0 / (int16_t)mem. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
12552
12553
/** Opcode 0xde !11/7.
 * FIDIVR m16int: reversed divide, ST0 = (int16_t)mem / ST0. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
12560
12561
/**
 * @opcode 0xde
 *
 * FPU escape 0xde: dispatches on the ModR/M byte.  Register forms (mod=3)
 * are the popping two-operand arithmetic ops on ST(i),ST0; memory forms
 * are the m16int integer arithmetic ops.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of the escape byte + ModR/M)
       for FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN,      bRm);
            case 3: if (bRm == 0xd9)    /* FCOMPP is only valid as 0xde 0xd9. */
                        return FNIEMOP_CALL(iemOp_fcompp);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12602
12603
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp:
 * frees ST(i)'s tag and then increments the FPU stack top pointer. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));     /* the 'ffree' half */
    IEM_MC_FPU_STACK_INC_TOP();                         /* the 'fincstp' (pop) half */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12623
12624
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: copies the FPU status word into AX without checking for
 * pending FPU exceptions (the no-wait form). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12639
12640
12641/** Opcode 0xdf 11/5. */
12642FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
12643{
12644 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
12645 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
12646 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
12647 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
12648}
12649
12650
/** Opcode 0xdf 11/6.
 * FCOMIP ST0,ST(i): ordered compare of ST0 with ST(i), storing the result in
 * ZF/PF/CF, then popping ST0.  fUCmp=false selects the ordered semantics. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
12659
12660
/** Opcode 0xdf !11/0.
 * FILD m16int: converts the 16-bit signed integer at mem to real80 and
 * pushes it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                       GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,                  FpuRes);
    IEM_MC_LOCAL(int16_t,                       i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,         pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,       pi16Val,    i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 relative to TOP is the one the push will land in; it must
       be empty or we overflow the stack. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12691
12692
/** Opcode 0xdf !11/1.
 * FISTTP m16int (SSE3): stores ST0 to mem as int16 using truncation
 * (chop) rounding regardless of FCW.RC, then pops ST0. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination for writing up front; committed or rolled back
       below depending on the outcome. */
    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(int16_t *,               pi16Dst,    1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,  2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: with the invalid-op exception masked, store the
           integer indefinite value; otherwise don't touch memory. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12730
12731
/** Opcode 0xdf !11/2.
 * FIST m16int: stores ST0 to mem as int16 using the FCW rounding mode;
 * the stack is left unchanged (no pop). */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(int16_t *,               pi16Dst,    1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,  2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: masked #IA stores the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12769
12770
/** Opcode 0xdf !11/3.
 * FISTP m16int: like FIST m16int above but pops ST0 afterwards
 * (the *_THEN_POP FSW/underflow updaters). */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(int16_t *,               pi16Dst,    1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,  2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12808
12809
/** Opcode 0xdf !11/4.
 * FBLD m80bcd: loads an 80-bit packed BCD value from mem, converts it to
 * real80 and pushes it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,              FpuRes);
    IEM_MC_LOCAL(RTPBCD80U,                 d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,     pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U,       pd80Val,    d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target (TOP-relative register 7) must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12840
12841
/** Opcode 0xdf !11/5.
 * FILD m64int: converts the 64-bit signed integer at mem to real80 and
 * pushes it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,              FpuRes);
    IEM_MC_LOCAL(int64_t,                   i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,     pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *,   pi64Val,    i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12872
12873
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd: stores ST0 to mem as an 80-bit packed BCD value, then
 * pops ST0. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(PRTPBCD80U,              pd80Dst,    1);
    IEM_MC_MEM_MAP_D80_WO(pd80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,  2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: masked #IA stores the packed-BCD indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12911
12912
/** Opcode 0xdf !11/7.
 * FISTP m64int: stores ST0 to mem as int64 using the FCW rounding mode,
 * then pops ST0. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(int64_t *,               pi64Dst,    1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,  2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: masked #IA stores the 64-bit integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12950
12951
/**
 * @opcode 0xdf
 *
 * FPU escape 0xdf: dispatches on the ModR/M byte.  Register forms include
 * several reserved encodings that real CPUs alias to other instructions
 * (see per-case notes); memory forms are m16int/m64int/m80bcd load/store.
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of the escape byte + ModR/M)
       for FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,   bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN,   bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,   bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0)    /* FNSTSW AX is only valid as 0xdf 0xe0. */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN,  bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i,   bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i,  bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i,   bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12992
12993
/**
 * @opcode 0xe0
 * @opfltest zf
 *
 * LOOPNE/LOOPNZ Jb: decrements (r/e)CX and jumps if the new count is
 * non-zero AND ZF is clear.  The counter width follows the effective
 * address size, hence the three-way switch below.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* "not one" because the decrement happens inside each branch:
               CX != 1 before the SUB means CX != 0 after it. */
            IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13048
13049
/**
 * @opcode 0xe1
 * @opfltest zf
 *
 * LOOPE/LOOPZ Jb: decrements (r/e)CX and jumps if the new count is
 * non-zero AND ZF is set (the ZF-set counterpart of LOOPNE above).
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13104
13105
/**
 * @opcode 0xe2
 *
 * LOOP Jb: decrements (r/e)CX and jumps while the new count is non-zero.
 * Includes a log-build-only shortcut for the LOOP $-2 busy-wait idiom.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */

    /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
     * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
     * the loop causes guest crashes, but when logging it's nice to skip a few million
     * lines of useless output. */
#if defined(LOG_ENABLED)
    /* Only when verbose logging is on: a self-branching LOOP (target == its
       own start) just burns the counter, so zero it in a single step. */
    if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
#endif

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* CX != 1 before the decrement means CX != 0 after it: take the
               branch.  The else-arm stores 0 rather than SUB'ing since the
               counter is known to be 1 (or already decremented to 0). */
            IEM_MC_IF_CX_IS_NOT_ONE() {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NOT_ONE() {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NOT_ONE() {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13199
13200
/**
 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ Jb: jumps if the counter register is zero; the counter
 * width follows the effective address size.  The register is not modified.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Branch arms are inverted vs. LOOP: non-zero falls through. */
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13248
13249
/**
 * @opcode 0xe4
 * @opfltest iopl
 *
 * IN AL,Ib: reads one byte from the immediate 8-bit I/O port into AL.
 * Deferred to iemCImpl_in; only AL may be dirtied by the recompiler.
 */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
13262
13263
/**
 * @opcode 0xe5
 * @opfltest iopl
 *
 * IN eAX,Ib: reads a word or dword (by effective operand size) from the
 * immediate 8-bit I/O port into AX/EAX.
 */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
13277
13278
/**
 * @opcode 0xe6
 * @opfltest iopl
 *
 * OUT Ib,AL: writes AL to the immediate 8-bit I/O port.
 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
13291
13292
/**
 * @opcode 0xe7
 * @opfltest iopl
 *
 * OUT Ib,eAX: writes AX or EAX (by effective operand size) to the
 * immediate 8-bit I/O port.
 */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
13306
13307
/**
 * @opcode 0xe8
 *
 * CALL rel16/rel32: near relative call.  The immediate width follows the
 * effective operand size (64-bit mode reads a sign-extended rel32); the
 * push/branch work is deferred to the iemCImpl_call_rel_* workers.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* rel32 sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13341
13342
/**
 * @opcode 0xe9
 *
 * JMP rel16/rel32: near relative jump.  The 64-bit case shares the 32-bit
 * decode path since both read a rel32 immediate.
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13372
13373
/**
 * @opcode 0xea
 *
 * JMP ptr16:16/ptr16:32: direct far jump.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);  /* 16-bit offset, zero-extended */
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
                                iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
13395
13396
13397/**
13398 * @opcode 0xeb
 *
 * jmp rel8 - short relative jump with a sign-extended 8-bit displacement.
13399 */
13400FNIEMOP_DEF(iemOp_jmp_Jb)
13401{
13402    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
13403    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    /* Operand size only affects how the resulting IP is truncated, not the
       displacement size; 64-bit mode defaults to 64-bit operand size. */
13404    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13405
13406    IEM_MC_BEGIN(0, 0, 0, 0);
13407    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13408    IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13409    IEM_MC_END();
13410}
13411
13412
13413/**
13414 * @opcode 0xec
13415 * @opfltest iopl
 *
 * in AL,DX - read one byte from the I/O port in DX into AL.
13416 */
13417FNIEMOP_DEF(iemOp_in_AL_DX)
13418{
13419    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
13420    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to the C implementation (port I/O may VM-exit); the RT_BIT_64
       mask tells the recompiler that xAX is modified by the call. */
13421    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
13422                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13423                                iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
13424}
13425
13426
13427/**
13428 * @opcode 0xed
13429 * @opfltest iopl
 *
 * in eAX,DX - read a word or dword (per operand size) from port DX into eAX.
13430 */
13431FNIEMOP_DEF(iemOp_in_eAX_DX)
13432{
13433    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
13434    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Access width is 2 or 4 bytes depending on the effective operand size;
       there is no 8-byte port access, so 64-bit operand size also yields 4. */
13435    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
13436                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13437                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
13438                                pVCpu->iem.s.enmEffAddrMode);
13439}
13440
13441
13442/**
13443 * @opcode 0xee
13444 * @opfltest iopl
 *
 * out DX,AL - write the byte in AL to the I/O port in DX.
13445 */
13446FNIEMOP_DEF(iemOp_out_DX_AL)
13447{
13448    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
13449    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* No guest registers are written (mask 0) - OUT only reads AL and DX. */
13450    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
13451                                iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
13452}
13453
13454
13455/**
13456 * @opcode 0xef
13457 * @opfltest iopl
 *
 * out DX,eAX - write a word or dword (per operand size) from eAX to port DX.
13458 */
13459FNIEMOP_DEF(iemOp_out_DX_eAX)
13460{
13461    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
13462    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* 2 or 4 byte access; 64-bit operand size also yields a 4 byte port write. */
13463    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
13464                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
13465                                pVCpu->iem.s.enmEffAddrMode);
13466}
13467
13468
13469/**
13470 * @opcode 0xf0
 *
 * LOCK prefix byte: records the prefix and restarts decoding with the next
 * opcode byte.  Whether LOCK is actually legal is checked by the prefixed
 * instruction's decoder.
13471 */
13472FNIEMOP_DEF(iemOp_lock)
13473{
    /* Any REX prefix seen so far is invalidated - REX must come immediately
       before the opcode (that is what the helper's name asserts). */
13474    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
13475    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
13476
13477    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
13478    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
13479}
13480
13481
13482/**
13483 * @opcode 0xf1
 *
 * INT1 / ICEBP - raises a \#DB (vector 1) via the generic software-interrupt
 * C implementation, flagged as an INT1 so privilege checks differ from CD 01.
13484 */
13485FNIEMOP_DEF(iemOp_int1)
13486{
13487    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
13488    /** @todo Does not generate \#UD on 286, or so they say...  Was allegedly a
13489     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
13490     * LOADALL memo.  Needs some testing. */
13491    IEMOP_HLP_MIN_386();
13492    /** @todo testcase! */
    /* Software interrupts branch far (through the IDT), may switch stacks and
       mode, and always end the current translation block. */
13493    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
13494                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
13495                                iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
13496}
13497
13498
13499/**
13500 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix byte: records the prefix and restarts decoding with the
 * next opcode byte.
13501 */
13502FNIEMOP_DEF(iemOp_repne)
13503{
13504    /* This overrides any previous REPE prefix. */
13505    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
13506    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
13507    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
13508
13509    /* For the 4 entry opcode tables, REPNZ overrides any previous
13510       REPZ and operand size prefixes. */
13511    pVCpu->iem.s.idxPrefix = 3;
13512
13513    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
13514    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
13515}
13516
13517
13518/**
13519 * @opcode 0xf3
 *
 * REP/REPE/REPZ prefix byte: records the prefix and restarts decoding with
 * the next opcode byte.
13520 */
13521FNIEMOP_DEF(iemOp_repe)
13522{
13523    /* This overrides any previous REPNE prefix. */
13524    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
13525    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
13526    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
13527
13528    /* For the 4 entry opcode tables, REPZ overrides any previous
13529       REPNZ and operand size prefixes. */
13530    pVCpu->iem.s.idxPrefix = 2;
13531
13532    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
13533    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
13534}
13535
13536
13537/**
13538 * @opcode 0xf4
 *
 * hlt - halt the CPU until the next interrupt; deferred to the C
 * implementation (privilege check, VM-exit) and ends the translation block.
13539 */
13540FNIEMOP_DEF(iemOp_hlt)
13541{
13542    IEMOP_MNEMONIC(hlt, "hlt");
13543    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13544    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_hlt);
13545}
13546
13547
13548/**
13549 * @opcode 0xf5
13550 * @opflmodify cf
 *
 * cmc - complement (toggle) the carry flag; no other flags are touched.
13551 */
13552FNIEMOP_DEF(iemOp_cmc)
13553{
13554    IEMOP_MNEMONIC(cmc, "cmc");
13555    IEM_MC_BEGIN(0, 0, 0, 0);
13556    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13557    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
13558    IEM_MC_ADVANCE_RIP_AND_FINISH();
13559    IEM_MC_END();
13560}
13561
13562
13563/**
13564 * Body for 'inc/dec/not/neg Eb'.
 *
 * Three paths: register operand, plain memory operand, and memory operand
 * with a LOCK prefix (atomic mapping + the locked worker function).  The
 * a_fnNormalU8/a_fnLockedU8 workers take (pu8Dst, pEFlags).
13565 */
13566#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
13567    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
13568    { \
13569        /* register access */ \
13570        IEM_MC_BEGIN(2, 0, 0, 0); \
13571        IEMOP_HLP_DONE_DECODING(); \
13572        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
13573        IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13574        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
13575        IEM_MC_REF_EFLAGS(pEFlags); \
13576        IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
13577        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13578        IEM_MC_END(); \
13579    } \
13580    else \
13581    { \
13582        /* memory access. */ \
        /* No LOCK prefix (or told to disregard it): plain read-modify-write. */ \
13583        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
13584        { \
13585            IEM_MC_BEGIN(2, 2, 0, 0); \
13586            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
13587            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13588            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13589            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13590            \
13591            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
13592            IEMOP_HLP_DONE_DECODING(); \
13593            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13594            IEM_MC_FETCH_EFLAGS(EFlags); \
13595            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
13596            \
13597            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13598            IEM_MC_COMMIT_EFLAGS(EFlags); \
13599            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13600            IEM_MC_END(); \
13601        } \
13602        else \
13603        { \
            /* LOCK prefix: atomic mapping and the locked worker variant. */ \
13604            IEM_MC_BEGIN(2, 2, 0, 0); \
13605            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
13606            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13607            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13608            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13609            \
13610            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
13611            IEMOP_HLP_DONE_DECODING(); \
13612            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13613            IEM_MC_FETCH_EFLAGS(EFlags); \
13614            IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
13615            \
13616            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
13617            IEM_MC_COMMIT_EFLAGS(EFlags); \
13618            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13619            IEM_MC_END(); \
13620        } \
13621    } \
13622    (void)0
13623
13624
13625/**
13626 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
 *
 * NOTE: This macro deliberately ends inside an open 'else {' (the LOCKed
 * memory branch) and must be followed by IEMOP_BODY_UNARY_Ev_LOCKED, which
 * supplies that branch and closes the braces.
13627 */
13628#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
13629    if (IEM_IS_MODRM_REG_MODE(bRm)) \
13630    { \
13631        /* \
13632         * Register target \
13633         */ \
13634        switch (pVCpu->iem.s.enmEffOpSize) \
13635        { \
13636            case IEMMODE_16BIT: \
13637                IEM_MC_BEGIN(2, 0, 0, 0); \
13638                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13639                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
13640                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13641                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13642                IEM_MC_REF_EFLAGS(pEFlags); \
13643                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
13644                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13645                IEM_MC_END(); \
13646                break; \
13647 \
13648            case IEMMODE_32BIT: \
13649                IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0); \
13650                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13651                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
13652                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13653                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13654                IEM_MC_REF_EFLAGS(pEFlags); \
13655                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                /* 32-bit register writes zero bits 63:32 of the full register. */ \
13656                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
13657                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13658                IEM_MC_END(); \
13659                break; \
13660 \
13661            case IEMMODE_64BIT: \
13662                IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0); \
13663                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13664                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
13665                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13666                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13667                IEM_MC_REF_EFLAGS(pEFlags); \
13668                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
13669                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13670                IEM_MC_END(); \
13671                break; \
13672 \
13673            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13674        } \
13675    } \
13676    else \
13677    { \
13678        /* \
13679         * Memory target. \
13680         */ \
13681        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
13682        { \
13683            switch (pVCpu->iem.s.enmEffOpSize) \
13684            { \
13685                case IEMMODE_16BIT: \
13686                    IEM_MC_BEGIN(2, 3, 0, 0); \
13687                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
13688                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13689                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13690                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13691                    \
13692                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13693                    IEMOP_HLP_DONE_DECODING(); \
13694                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13695                    IEM_MC_FETCH_EFLAGS(EFlags); \
13696                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
13697                    \
13698                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13699                    IEM_MC_COMMIT_EFLAGS(EFlags); \
13700                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13701                    IEM_MC_END(); \
13702                    break; \
13703 \
13704                case IEMMODE_32BIT: \
13705                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
13706                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
13707                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13708                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13709                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13710                    \
13711                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13712                    IEMOP_HLP_DONE_DECODING(); \
13713                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13714                    IEM_MC_FETCH_EFLAGS(EFlags); \
13715                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
13716                    \
13717                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13718                    IEM_MC_COMMIT_EFLAGS(EFlags); \
13719                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13720                    IEM_MC_END(); \
13721                    break; \
13722 \
13723                case IEMMODE_64BIT: \
13724                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
13725                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
13726                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13727                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13728                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13729                    \
13730                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13731                    IEMOP_HLP_DONE_DECODING(); \
13732                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13733                    IEM_MC_FETCH_EFLAGS(EFlags); \
13734                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
13735                    \
13736                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13737                    IEM_MC_COMMIT_EFLAGS(EFlags); \
13738                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13739                    IEM_MC_END(); \
13740                    break; \
13741 \
13742                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13743            } \
13744        } \
13745        else \
13746        { \
13747            (void)0
13748
/**
 * LOCKed-memory tail for IEMOP_BODY_UNARY_Ev.
 *
 * Supplies the atomic (LOCK prefix) memory variants and closes the 'else {'
 * and outer brace left open by IEMOP_BODY_UNARY_Ev - the two macros must
 * always be used as a pair, in that order.
 */
13749#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
13750            switch (pVCpu->iem.s.enmEffOpSize) \
13751            { \
13752                case IEMMODE_16BIT: \
13753                    IEM_MC_BEGIN(2, 3, 0, 0); \
13754                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
13755                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13756                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13757                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13758                    \
13759                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13760                    IEMOP_HLP_DONE_DECODING(); \
13761                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13762                    IEM_MC_FETCH_EFLAGS(EFlags); \
13763                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
13764                    \
13765                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
13766                    IEM_MC_COMMIT_EFLAGS(EFlags); \
13767                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13768                    IEM_MC_END(); \
13769                    break; \
13770 \
13771                case IEMMODE_32BIT: \
13772                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
13773                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
13774                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13775                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13776                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13777                    \
13778                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13779                    IEMOP_HLP_DONE_DECODING(); \
13780                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13781                    IEM_MC_FETCH_EFLAGS(EFlags); \
13782                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
13783                    \
13784                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
13785                    IEM_MC_COMMIT_EFLAGS(EFlags); \
13786                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13787                    IEM_MC_END(); \
13788                    break; \
13789 \
13790                case IEMMODE_64BIT: \
13791                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
13792                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
13793                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13794                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13795                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13796                    \
13797                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13798                    IEMOP_HLP_DONE_DECODING(); \
13799                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13800                    IEM_MC_FETCH_EFLAGS(EFlags); \
13801                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
13802                    \
13803                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
13804                    IEM_MC_COMMIT_EFLAGS(EFlags); \
13805                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13806                    IEM_MC_END(); \
13807                    break; \
13808 \
13809                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13810            } \
13811        } \
13812    } \
13813    (void)0
13814
13815
13816/**
13817 * @opmaps grp3_f6
13818 * @opcode /0
13819 * @opflclass logical
13820 * @todo also /1
 *
 * test Eb,Ib - AND the byte operand with an imm8, update flags, discard the
 * result (the memory operand is therefore mapped read-only).
13821 */
13822FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
13823{
13824    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    /* AF is left undefined by TEST per the verification list below. */
13825    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
13826
13827    if (IEM_IS_MODRM_REG_MODE(bRm))
13828    {
13829        /* register access */
13830        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13831        IEM_MC_BEGIN(3, 0, 0, 0);
13832        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13833        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13834        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
13835        IEM_MC_ARG(uint32_t *, pEFlags, 2);
13836        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
13837        IEM_MC_REF_EFLAGS(pEFlags);
13838        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
13839        IEM_MC_ADVANCE_RIP_AND_FINISH();
13840        IEM_MC_END();
13841    }
13842    else
13843    {
13844        /* memory access. */
13845        IEM_MC_BEGIN(3, 3, 0, 0);
13846        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* The '1' accounts for the imm8 still to be fetched after the
           ModRM bytes (matters for RIP-relative addressing). */
13847        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13848
13849        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13850        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13851
13852        IEM_MC_LOCAL(uint8_t, bUnmapInfo);
13853        IEM_MC_ARG(uint8_t const *, pu8Dst, 0);
13854        IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13855
13856        IEM_MC_ARG_CONST(uint8_t, u8Src, u8Imm, 1);
13857        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
13858        IEM_MC_FETCH_EFLAGS(EFlags);
13859        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
13860
13861        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
13862        IEM_MC_COMMIT_EFLAGS(EFlags);
13863        IEM_MC_ADVANCE_RIP_AND_FINISH();
13864        IEM_MC_END();
13865    }
13866}
13867
13868
13869/* Body for opcode 0xf6 variations /4, /5, /6 and /7. */
/* The worker (a_pfnU8Expr) operates on AX in place (AL in, AX out for mul;
   AX in, AL/AH out for div) and returns non-zero to request a #DE - hence the
   rc check below.  No LOCK prefix is allowed (register/memory is read-only). */
13870#define IEMOP_GRP3_MUL_DIV_EB(bRm, a_pfnU8Expr) \
13871    PFNIEMAIMPLMULDIVU8 const pfnU8 = (a_pfnU8Expr); \
13872    if (IEM_IS_MODRM_REG_MODE(bRm)) \
13873    { \
13874        /* register access */ \
13875        IEM_MC_BEGIN(3, 1, 0, 0); \
13876        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13877        IEM_MC_ARG(uint16_t *, pu16AX, 0); \
13878        IEM_MC_ARG(uint8_t, u8Value, 1); \
13879        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
13880        IEM_MC_LOCAL(int32_t, rc); \
13881        \
13882        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13883        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
13884        IEM_MC_REF_EFLAGS(pEFlags); \
13885        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags); \
13886        IEM_MC_IF_LOCAL_IS_Z(rc) { \
13887            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13888        } IEM_MC_ELSE() { \
13889            IEM_MC_RAISE_DIVIDE_ERROR(); \
13890        } IEM_MC_ENDIF(); \
13891        \
13892        IEM_MC_END(); \
13893    } \
13894    else \
13895    { \
13896        /* memory access. */ \
13897        IEM_MC_BEGIN(3, 2, 0, 0); \
13898        IEM_MC_ARG(uint16_t *, pu16AX, 0); \
13899        IEM_MC_ARG(uint8_t, u8Value, 1); \
13900        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
13901        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13902        IEM_MC_LOCAL(int32_t, rc); \
13903        \
13904        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13905        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13906        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13907        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
13908        IEM_MC_REF_EFLAGS(pEFlags); \
13909        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags); \
13910        IEM_MC_IF_LOCAL_IS_Z(rc) { \
13911            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13912        } IEM_MC_ELSE() { \
13913            IEM_MC_RAISE_DIVIDE_ERROR(); \
13914        } IEM_MC_ENDIF(); \
13915        \
13916        IEM_MC_END(); \
13917    } (void)0
13918
13919
13920/* Body for opcode 0xf7 variant /4, /5, /6 and /7. */
/* Word/dword/qword mul/imul/div/idiv on the rAX/rDX pair.  The worker table
   (a_pImplExpr) supplies pfnU16/pfnU32/pfnU64; each worker returns non-zero
   to request a #DE (divide error), checked via the rc local below. */
13921#define IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, a_pImplExpr) \
13922    PCIEMOPMULDIVSIZES const pImpl = (a_pImplExpr); \
13923    if (IEM_IS_MODRM_REG_MODE(bRm)) \
13924    { \
13925        /* register access */ \
13926        switch (pVCpu->iem.s.enmEffOpSize) \
13927        { \
13928            case IEMMODE_16BIT: \
13929                IEM_MC_BEGIN(4, 1, 0, 0); \
13930                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13931                IEM_MC_ARG(uint16_t *, pu16AX, 0); \
13932                IEM_MC_ARG(uint16_t *, pu16DX, 1); \
13933                IEM_MC_ARG(uint16_t, u16Value, 2); \
13934                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
13935                IEM_MC_LOCAL(int32_t, rc); \
13936                \
13937                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13938                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
13939                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX); \
13940                IEM_MC_REF_EFLAGS(pEFlags); \
13941                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags); \
13942                IEM_MC_IF_LOCAL_IS_Z(rc) { \
13943                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13944                } IEM_MC_ELSE() { \
13945                    IEM_MC_RAISE_DIVIDE_ERROR(); \
13946                } IEM_MC_ENDIF(); \
13947                \
13948                IEM_MC_END(); \
13949                break; \
13950 \
13951            case IEMMODE_32BIT: \
13952                IEM_MC_BEGIN(4, 1, IEM_MC_F_MIN_386, 0); \
13953                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13954                IEM_MC_ARG(uint32_t *, pu32AX, 0); \
13955                IEM_MC_ARG(uint32_t *, pu32DX, 1); \
13956                IEM_MC_ARG(uint32_t, u32Value, 2); \
13957                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
13958                IEM_MC_LOCAL(int32_t, rc); \
13959                \
13960                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13961                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX); \
13962                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX); \
13963                IEM_MC_REF_EFLAGS(pEFlags); \
13964                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags); \
13965                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    /* 32-bit writes zero the high halves of RAX/RDX, but only \
                       on success - a #DE must leave the registers untouched. */ \
13966                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
13967                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX); \
13968                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13969                } IEM_MC_ELSE() { \
13970                    IEM_MC_RAISE_DIVIDE_ERROR(); \
13971                } IEM_MC_ENDIF(); \
13972                \
13973                IEM_MC_END(); \
13974                break; \
13975 \
13976            case IEMMODE_64BIT: \
13977                IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT, 0); \
13978                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13979                IEM_MC_ARG(uint64_t *, pu64AX, 0); \
13980                IEM_MC_ARG(uint64_t *, pu64DX, 1); \
13981                IEM_MC_ARG(uint64_t, u64Value, 2); \
13982                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
13983                IEM_MC_LOCAL(int32_t, rc); \
13984                \
13985                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13986                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX); \
13987                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX); \
13988                IEM_MC_REF_EFLAGS(pEFlags); \
13989                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags); \
13990                IEM_MC_IF_LOCAL_IS_Z(rc) { \
13991                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13992                } IEM_MC_ELSE() { \
13993                    IEM_MC_RAISE_DIVIDE_ERROR(); \
13994                } IEM_MC_ENDIF(); \
13995                \
13996                IEM_MC_END(); \
13997                break; \
13998 \
13999            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
14000        } \
14001    } \
14002    else \
14003    { \
14004        /* memory access. */ \
14005        switch (pVCpu->iem.s.enmEffOpSize) \
14006        { \
14007            case IEMMODE_16BIT: \
14008                IEM_MC_BEGIN(4, 2, 0, 0); \
14009                IEM_MC_ARG(uint16_t *, pu16AX, 0); \
14010                IEM_MC_ARG(uint16_t *, pu16DX, 1); \
14011                IEM_MC_ARG(uint16_t, u16Value, 2); \
14012                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
14013                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14014                IEM_MC_LOCAL(int32_t, rc); \
14015                \
14016                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14017                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14018                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14019                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
14020                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX); \
14021                IEM_MC_REF_EFLAGS(pEFlags); \
14022                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags); \
14023                IEM_MC_IF_LOCAL_IS_Z(rc) { \
14024                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14025                } IEM_MC_ELSE() { \
14026                    IEM_MC_RAISE_DIVIDE_ERROR(); \
14027                } IEM_MC_ENDIF(); \
14028                \
14029                IEM_MC_END(); \
14030                break; \
14031 \
14032            case IEMMODE_32BIT: \
14033                IEM_MC_BEGIN(4, 2, IEM_MC_F_MIN_386, 0); \
14034                IEM_MC_ARG(uint32_t *, pu32AX, 0); \
14035                IEM_MC_ARG(uint32_t *, pu32DX, 1); \
14036                IEM_MC_ARG(uint32_t, u32Value, 2); \
14037                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
14038                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14039                IEM_MC_LOCAL(int32_t, rc); \
14040                \
14041                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14042                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14043                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14044                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX); \
14045                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX); \
14046                IEM_MC_REF_EFLAGS(pEFlags); \
14047                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags); \
14048                IEM_MC_IF_LOCAL_IS_Z(rc) { \
14049                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
14050                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX); \
14051                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14052                } IEM_MC_ELSE() { \
14053                    IEM_MC_RAISE_DIVIDE_ERROR(); \
14054                } IEM_MC_ENDIF(); \
14055                \
14056                IEM_MC_END(); \
14057                break; \
14058 \
14059            case IEMMODE_64BIT: \
14060                IEM_MC_BEGIN(4, 2, IEM_MC_F_64BIT, 0); \
14061                IEM_MC_ARG(uint64_t *, pu64AX, 0); \
14062                IEM_MC_ARG(uint64_t *, pu64DX, 1); \
14063                IEM_MC_ARG(uint64_t, u64Value, 2); \
14064                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
14065                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14066                IEM_MC_LOCAL(int32_t, rc); \
14067                \
14068                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14069                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14070                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14071                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX); \
14072                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX); \
14073                IEM_MC_REF_EFLAGS(pEFlags); \
14074                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags); \
14075                IEM_MC_IF_LOCAL_IS_Z(rc) { \
14076                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14077                } IEM_MC_ELSE() { \
14078                    IEM_MC_RAISE_DIVIDE_ERROR(); \
14079                } IEM_MC_ENDIF(); \
14080                \
14081                IEM_MC_END(); \
14082                break; \
14083 \
14084            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
14085        } \
14086    } (void)0
14087
14088
14089/**
14090 * @opmaps grp3_f6
14091 * @opcode /2
14092 * @opflclass unchanged
 *
 * not Eb - one's complement of the byte operand; EFLAGS are unchanged.
14093 */
14094FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
14095{
14096/** @todo does not modify EFLAGS. */
14097    IEMOP_MNEMONIC(not_Eb, "not Eb");
14098    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
14099}
14100
14101
14102/**
14103 * @opmaps grp3_f6
14104 * @opcode /3
14105 * @opflclass arithmetic
 *
 * neg Eb - two's complement negation of the byte operand.
14106 */
14107FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
14108{
    /* Note: the mnemonic symbol was previously misspelled 'net_Eb'; fixed to
       match the display string and the Ev sibling (neg_Ev). */
14109    IEMOP_MNEMONIC(neg_Eb, "neg Eb");
14110    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
14111}
14112
14113
14114/**
14115 * @opcode 0xf6
 *
 * Group 3 byte-operand dispatcher: routes on the ModRM reg field to
 * test/not/neg and the mul/imul/div/idiv bodies.
14116 */
14117FNIEMOP_DEF(iemOp_Grp3_Eb)
14118{
14119    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14120    switch (IEM_GET_MODRM_REG_8(bRm))
14121    {
14122        case 0:
14123            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        /* /1 decodes the same as /0 (TEST) here; see the @todo in
           iemOp_grp3_test_Eb regarding this undocumented alias. */
14124        case 1:
14125            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
14126        case 2:
14127            return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
14128        case 3:
14129            return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
14130        case 4:
14131        {
14132            /**
14133             * @opdone
14134             * @opmaps grp3_f6
14135             * @opcode /4
14136             * @opflclass multiply
14137             */
14138            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
14139            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14140            IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
14141            break;
14142        }
14143        case 5:
14144        {
14145            /**
14146             * @opdone
14147             * @opmaps grp3_f6
14148             * @opcode /5
14149             * @opflclass multiply
14150             */
14151            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
14152            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14153            IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
14154            break;
14155        }
14156        case 6:
14157        {
14158            /**
14159             * @opdone
14160             * @opmaps grp3_f6
14161             * @opcode /6
14162             * @opflclass division
14163             */
14164            IEMOP_MNEMONIC(div_Eb, "div Eb");
14165            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14166            IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
14167            break;
14168        }
14169        case 7:
14170        {
14171            /**
14172             * @opdone
14173             * @opmaps grp3_f6
14174             * @opcode /7
14175             * @opflclass division
14176             */
14177            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
14178            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14179            IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
14180            break;
14181        }
14182        IEM_NOT_REACHED_DEFAULT_CASE_RET();
14183    }
14184}
14185
14186
14187/**
14188 * @opmaps grp3_f7
14189 * @opcode /0
14190 * @opflclass logical
 *
 * test Ev,Iv - AND the word/dword/qword operand with an immediate, update
 * flags, discard the result.  The 64-bit form uses a sign-extended imm32;
 * memory operands are mapped read-only since nothing is written back.
14191 */
14192FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
14193{
14194    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
14195    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
14196
14197    if (IEM_IS_MODRM_REG_MODE(bRm))
14198    {
14199        /* register access */
14200        switch (pVCpu->iem.s.enmEffOpSize)
14201        {
14202            case IEMMODE_16BIT:
14203                IEM_MC_BEGIN(3, 0, 0, 0);
14204                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
14205                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14206                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
14207                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
14208                IEM_MC_ARG(uint32_t *, pEFlags, 2);
14209                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
14210                IEM_MC_REF_EFLAGS(pEFlags);
14211                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
14212                IEM_MC_ADVANCE_RIP_AND_FINISH();
14213                IEM_MC_END();
14214                break;
14215
14216            case IEMMODE_32BIT:
14217                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
14218                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
14219                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14220                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
14221                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
14222                IEM_MC_ARG(uint32_t *, pEFlags, 2);
14223                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
14224                IEM_MC_REF_EFLAGS(pEFlags);
14225                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
14226                /* No clearing the high dword here - test doesn't write back the result. */
14227                IEM_MC_ADVANCE_RIP_AND_FINISH();
14228                IEM_MC_END();
14229                break;
14230
14231            case IEMMODE_64BIT:
14232                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
14233                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
14234                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14235                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
14236                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
14237                IEM_MC_ARG(uint32_t *, pEFlags, 2);
14238                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
14239                IEM_MC_REF_EFLAGS(pEFlags);
14240                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
14241                IEM_MC_ADVANCE_RIP_AND_FINISH();
14242                IEM_MC_END();
14243                break;
14244
14245            IEM_NOT_REACHED_DEFAULT_CASE_RET();
14246        }
14247    }
14248    else
14249    {
14250        /* memory access. */
        /* The trailing argument to IEM_MC_CALC_RM_EFF_ADDR is the number of
           immediate bytes still to be fetched (2/4/4 below), needed so
           RIP-relative addressing points past the whole instruction. */
14251        switch (pVCpu->iem.s.enmEffOpSize)
14252        {
14253            case IEMMODE_16BIT:
14254                IEM_MC_BEGIN(3, 3, 0, 0);
14255                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14256                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
14257
14258                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
14259                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14260
14261                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
14262                IEM_MC_ARG(uint16_t const *, pu16Dst, 0);
14263                IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14264
14265                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
14266                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
14267                IEM_MC_FETCH_EFLAGS(EFlags);
14268                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
14269
14270                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
14271                IEM_MC_COMMIT_EFLAGS(EFlags);
14272                IEM_MC_ADVANCE_RIP_AND_FINISH();
14273                IEM_MC_END();
14274                break;
14275
14276            case IEMMODE_32BIT:
14277                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
14278                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14279                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
14280
14281                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
14282                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14283
14284                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
14285                IEM_MC_ARG(uint32_t const *, pu32Dst, 0);
14286                IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14287
14288                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
14289                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
14290                IEM_MC_FETCH_EFLAGS(EFlags);
14291                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
14292
14293                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
14294                IEM_MC_COMMIT_EFLAGS(EFlags);
14295                IEM_MC_ADVANCE_RIP_AND_FINISH();
14296                IEM_MC_END();
14297                break;
14298
14299            case IEMMODE_64BIT:
14300                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
14301                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14302                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
14303
14304                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
14305                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14306
14307                IEM_MC_ARG(uint64_t const *, pu64Dst, 0);
14308                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
14309                IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14310
14311                IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1);
14312                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
14313                IEM_MC_FETCH_EFLAGS(EFlags);
14314                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
14315
14316                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
14317                IEM_MC_COMMIT_EFLAGS(EFlags);
14318                IEM_MC_ADVANCE_RIP_AND_FINISH();
14319                IEM_MC_END();
14320                break;
14321
14322            IEM_NOT_REACHED_DEFAULT_CASE_RET();
14323        }
14324    }
14325}
14326
14327
14328/**
14329 * @opmaps grp3_f7
14330 * @opcode /2
14331 * @opflclass unchanged
 *
 * not Ev - one's complement of the word/dword/qword operand; EFLAGS unchanged.
14332 */
14333FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
14334{
14335/** @todo does not modify EFLAGS */
14336    IEMOP_MNEMONIC(not_Ev, "not Ev");
    /* The two body macros form one statement: _Ev ends in an open LOCK-path
       'else' which _Ev_LOCKED fills in and closes. */
14337    IEMOP_BODY_UNARY_Ev(       iemAImpl_not_u16,        iemAImpl_not_u32,        iemAImpl_not_u64);
14338    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
14339}
14340
14341
14342/**
14343 * @opmaps grp3_f7
14344 * @opcode /3
14345 * @opflclass arithmetic
 *
 * neg Ev - two's complement negation of the word/dword/qword operand.
14346 */
14347FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
14348{
14349    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    /* The two body macros form one statement: _Ev ends in an open LOCK-path
       'else' which _Ev_LOCKED fills in and closes. */
14350    IEMOP_BODY_UNARY_Ev(       iemAImpl_neg_u16,        iemAImpl_neg_u32,        iemAImpl_neg_u64);
14351    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
14352}
14353
14354
/**
 * @opmaps grp3_f7
 * @opcode /4
 * @opflclass multiply
 *
 * MUL Ev - unsigned multiply of rAX by Ev.
 */
FNIEMOP_DEF_1(iemOp_grp3_mul_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(mul_Ev, "mul Ev");
    /* SF, ZF, AF and PF are left undefined by MUL, so tell the verifier. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    /* Common grp3 mul/div body with the target-CPU specific EFLAGS
       behavior variant of the mul worker table. */
    IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
}
14366
14367
/**
 * @opmaps grp3_f7
 * @opcode /5
 * @opflclass multiply
 *
 * IMUL Ev - signed multiply of rAX by Ev (one-operand form).
 */
FNIEMOP_DEF_1(iemOp_grp3_imul_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(imul_Ev, "imul Ev");
    /* SF, ZF, AF and PF are left undefined by IMUL, so tell the verifier. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    /* Common grp3 mul/div body with the target-CPU specific EFLAGS
       behavior variant of the imul worker table. */
    IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
}
14379
14380
/**
 * @opmaps grp3_f7
 * @opcode /6
 * @opflclass division
 *
 * DIV Ev - unsigned divide of rDX:rAX by Ev.
 */
FNIEMOP_DEF_1(iemOp_grp3_div_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(div_Ev, "div Ev");
    /* All the status flags are left undefined by DIV. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
    /* Common grp3 mul/div body with the target-CPU specific EFLAGS
       behavior variant of the div worker table. */
    IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
}
14392
14393
/**
 * @opmaps grp3_f7
 * @opcode /7
 * @opflclass division
 *
 * IDIV Ev - signed divide of rDX:rAX by Ev.
 */
FNIEMOP_DEF_1(iemOp_grp3_idiv_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
    /* All the status flags are left undefined by IDIV. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
    /* Common grp3 mul/div body with the target-CPU specific EFLAGS
       behavior variant of the idiv worker table. */
    IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
}
14405
14406
14407/**
14408 * @opcode 0xf7
14409 */
14410FNIEMOP_DEF(iemOp_Grp3_Ev)
14411{
14412 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14413 switch (IEM_GET_MODRM_REG_8(bRm))
14414 {
14415 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
14416 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
14417 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
14418 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
14419 case 4: return FNIEMOP_CALL_1(iemOp_grp3_mul_Ev, bRm);
14420 case 5: return FNIEMOP_CALL_1(iemOp_grp3_imul_Ev, bRm);
14421 case 6: return FNIEMOP_CALL_1(iemOp_grp3_div_Ev, bRm);
14422 case 7: return FNIEMOP_CALL_1(iemOp_grp3_idiv_Ev, bRm);
14423 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14424 }
14425}
14426
14427
/**
 * @opcode 0xf8
 * @opflmodify cf
 * @opflclear cf
 *
 * CLC - clear the carry flag.
 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);   /* The whole instruction is this one bit clear. */
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
14442
14443
/**
 * @opcode 0xf9
 * @opflmodify cf
 * @opflset cf
 *
 * STC - set the carry flag.
 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);     /* The whole instruction is this one bit set. */
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
14458
14459
/**
 * @opcode 0xfa
 * @opfltest iopl,vm
 * @opflmodify if,vif
 *
 * CLI - clear the interrupt flag.  All the real work (privilege checks and
 * IF/VIF updating) lives in iemCImpl_cli.
 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* IEM_CIMPL_F_CHECK_IRQ_BEFORE requests a pending-interrupt check ahead
       of the instruction, i.e. before IF can be cleared. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, 0, iemCImpl_cli);
}
14471
14472
/**
 * @opcode 0xfb
 * @opfltest iopl,vm
 * @opflmodify if,vif
 *
 * STI - set the interrupt flag.  All the real work (privilege checks and
 * IF/VIF updating) lives in iemCImpl_sti.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* IEM_CIMPL_F_INHIBIT_SHADOW sets up the interrupt inhibition shadow
       over the following instruction; IRQs are checked after it. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(  IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
                                | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, 0, iemCImpl_sti);
}
14485
14486
/**
 * @opcode 0xfc
 * @opflmodify df
 * @opflclear df
 *
 * CLD - clear the direction flag.
 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);   /* The whole instruction is this one bit clear. */
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
14501
14502
/**
 * @opcode 0xfd
 * @opflmodify df
 * @opflset df
 *
 * STD - set the direction flag.
 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);     /* The whole instruction is this one bit set. */
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
14517
14518
/**
 * @opmaps grp4
 * @opcode /0
 * @opflclass incdec
 *
 * INC Eb - byte increment of a register or memory operand.
 */
FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Eb, "inc Eb");
    /* Common unary Eb body covering both the plain and LOCK'ed workers. */
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
}
14529
14530
/**
 * @opmaps grp4
 * @opcode /1
 * @opflclass incdec
 *
 * DEC Eb - byte decrement of a register or memory operand.
 */
FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Eb, "dec Eb");
    /* Common unary Eb body covering both the plain and LOCK'ed workers. */
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
}
14541
14542
14543/**
14544 * @opcode 0xfe
14545 */
14546FNIEMOP_DEF(iemOp_Grp4)
14547{
14548 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14549 switch (IEM_GET_MODRM_REG_8(bRm))
14550 {
14551 case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
14552 case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
14553 default:
14554 /** @todo is the eff-addr decoded? */
14555 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
14556 IEMOP_RAISE_INVALID_OPCODE_RET();
14557 }
14558}
14559
/**
 * @opmaps grp5
 * @opcode /0
 * @opflclass incdec
 *
 * INC Ev - word/dword/qword increment of a register or memory operand.
 */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    /* Plain forms first, then the LOCK prefixed memory form; workers are
       picked by effective operand size. */
    IEMOP_BODY_UNARY_Ev(       iemAImpl_inc_u16,        iemAImpl_inc_u32,        iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
14571
14572
/**
 * @opmaps grp5
 * @opcode /1
 * @opflclass incdec
 *
 * DEC Ev - word/dword/qword decrement of a register or memory operand.
 */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    /* Plain forms first, then the LOCK prefixed memory form; workers are
       picked by effective operand size. */
    IEMOP_BODY_UNARY_Ev(       iemAImpl_dec_u16,        iemAImpl_dec_u32,        iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
14584
14585
/**
 * Opcode 0xff /2.
 *
 * CALL Ev - near indirect call; the new IP/EIP/RIP comes either from a
 * register or from a memory operand, sized by the effective operand size.
 * The actual call (stack push + branch) is done by iemCImpl_call_16/32/64.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory operand.  (The old comment here
           said "register" - copy & paste leftover from the branch above.) */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1, 0, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14672
/**
 * Body macro for the far indirect branches of group 5 (0xff /3 CALLF and
 * /5 JMPF): raises \#UD for register operands, then loads a selector:offset
 * far pointer from memory (offset first, selector at the trailing
 * displacement) and defers to @a a_fnCImpl.
 *
 * @param   a_bRm           The ModR/M byte.
 * @param   a_fnCImpl       The C implementation to call (selector, offset,
 *                          effective operand size).
 * @param   a_fCImplExtra   Additional IEM_CIMPL_F_XXX flags to pass on.
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl, a_fCImplExtra) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1, 0, 0); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint16_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_16BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint32_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_32BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint64_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_64BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE /* no gates */, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
14741
14742
/**
 * Opcode 0xff /3.
 *
 * CALLF Ep - far indirect call via a far pointer in memory; uses the
 * common grp5 far body with IEM_CIMPL_F_BRANCH_STACK since a call pushes
 * a return address.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf, IEM_CIMPL_F_BRANCH_STACK);
}
14752
14753
/**
 * Opcode 0xff /4.
 *
 * JMP Ev - near indirect jump; the new IP/EIP/RIP comes either from a
 * register or from a memory operand, sized by the effective operand size.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14840
14841
/**
 * Opcode 0xff /5.
 *
 * JMPF Ep - far indirect jump via a far pointer in memory; uses the common
 * grp5 far body with no extra IEM_CIMPL_F_XXX flags (no stack involvement,
 * unlike callf).
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp, 0);
}
14851
14852
/**
 * Opcode 0xff /6.
 *
 * PUSH Ev - push a word/dword/qword register or memory operand.  The
 * register forms are handled by the common push-greg worker; only the
 * memory forms are expanded here.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(uint16_t,  u16Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            /* 32-bit operand size does not exist in 64-bit mode for push. */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEM_MC_LOCAL(uint32_t,  u32Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t,  u64Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14908
14909
14910/**
14911 * @opcode 0xff
14912 */
14913FNIEMOP_DEF(iemOp_Grp5)
14914{
14915 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14916 switch (IEM_GET_MODRM_REG_8(bRm))
14917 {
14918 case 0:
14919 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
14920 case 1:
14921 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
14922 case 2:
14923 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
14924 case 3:
14925 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
14926 case 4:
14927 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
14928 case 5:
14929 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
14930 case 6:
14931 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
14932 case 7:
14933 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
14934 IEMOP_RAISE_INVALID_OPCODE_RET();
14935 }
14936 AssertFailedReturn(VERR_IEM_IPE_3);
14937}
14938
14939
14940
/**
 * The one byte opcode dispatch table.
 *
 * Indexed directly by the opcode byte (0x00..0xff); each entry is the
 * decoder function for that opcode.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_in_eAX_DX,        iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
15008
15009
15010/** @} */
15011
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette