VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 103187

Last change on this file since 103187 was 103185, checked in by vboxsync, 14 months ago

VMM/IEMAllInst*: Liveness analysis, part 2: Flag input & modification annotations. bugref:10372

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 537.7 KB
Line 
/* $Id: IEMAllInstOneByte.cpp.h 103185 2024-02-04 15:42:48Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
/* Instruction group definitions: */

/** @defgroup og_gen    General
 * @{ */
 /** @defgroup og_gen_arith   Arithmetic
  * @{ */
  /** @defgroup og_gen_arith_bin   Binary numbers */
  /** @defgroup og_gen_arith_dec   Decimal numbers */
 /** @} */
/** @} */

/** @defgroup og_stack Stack
 * @{ */
 /** @defgroup og_stack_sreg  Segment registers */
/** @} */

/** @defgroup og_prefix     Prefixes */
/** @defgroup og_escapes    Escape bytes */



/** @name One byte opcodes.
 * @{
 */
58
/**
 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
 * memory/register as the destination.
 *
 * @param   a_fnNormalU8    Assembly worker for the plain (non-LOCK) case.
 * @param   a_fnLockedU8    Assembly worker used when a LOCK prefix (and no
 *                          IEM_F_X86_DISREGARD_LOCK) requires an atomic
 *                          read-modify-write of the memory destination.
 */
#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8, a_fnLockedU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bMapInfoDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bMapInfoDst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bMapInfoDst); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
136
/**
 * Body for instructions like TEST & CMP, ++ with a byte memory/registers as
 * operands.
 *
 * The memory destination is only read, never written, so a LOCK prefix is
 * invalid and raises \#UD.
 *
 * @param   a_fnNormalU8    Assembly worker performing the operation.
 */
#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,         u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,  2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* LOCK is not allowed with a read-only destination. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
198
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination.
 *
 * The destination is always a register, so no LOCK prefix is allowed and the
 * memory operand (if any) is only fetched.
 *
 * @param   a_fnNormalU8    Assembly worker performing the operation.
 */
#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        IEM_MC_BEGIN(3, 1, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
247
248
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Note! This macro deliberately ends inside the LOCK-prefix 'else' branch with
 *       unbalanced braces; IEMOP_BODY_BINARY_rm_rv_LOCKED supplies the locked
 *       workers and closes the braces.
 *
 * @param   a_fnNormalU16   Assembly worker for the 16-bit operand size.
 * @param   a_fnNormalU32   Assembly worker for the 32-bit operand size.
 * @param   a_fnNormalU64   Assembly worker for the 64-bit operand size.
 */
#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py.
   Continues (and closes the open braces of) IEMOP_BODY_BINARY_rm_rv_RW,
   providing the atomic workers used when a LOCK prefix is in effect. */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
467
/**
 * Body for read-only word/dword/qword instructions like TEST and CMP with
 * memory/register as the destination.
 *
 * The destination is only read, so a LOCK prefix is invalid and raises \#UD.
 *
 * @param   a_fnNormalU16   Assembly worker for the 16-bit operand size.
 * @param   a_fnNormalU32   Assembly worker for the 32-bit operand size.
 * @param   a_fnNormalU64   Assembly worker for the 64-bit operand size.
 */
#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,         u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags,  2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,         u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags,  2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,         u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags,  2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* LOCK is not allowed with a read-only destination. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
617
618
/**
 * Body for instructions like ADD, AND, OR, ++ working on AL with a byte
 * immediate.
 *
 * Note! Deliberately ends without a trailing semicolon or (void)0 so the
 *       caller supplies the terminating ';'.
 *
 * @param   a_fnNormalU8    Assembly worker performing the operation.
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    \
    IEM_MC_BEGIN(3, 0, 0, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1); \
    IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
    \
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
    \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
638
/**
 * Body for instructions like ADD, AND, OR, ++ working on AX/EAX/RAX with a
 * word/dword immediate (sign-extended to 64 bits for the qword case).
 *
 * @param   a_fnNormalU16       Assembly worker for the 16-bit operand size.
 * @param   a_fnNormalU32       Assembly worker for the 32-bit operand size.
 * @param   a_fnNormalU64       Assembly worker for the 64-bit operand size.
 * @param   a_fModifiesDstReg   Non-zero when the worker writes the destination
 *                              register, requiring the high dword of RAX to be
 *                              cleared in 32-bit mode.
 */
#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            \
            IEM_MC_BEGIN(3, 0, 0, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
            \
            if (a_fModifiesDstReg) \
                IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
705


/* Instruction specification format - work in progress: */

710/**
711 * @opcode 0x00
712 * @opmnemonic add
713 * @op1 rm:Eb
714 * @op2 reg:Gb
715 * @opmaps one
716 * @openc ModR/M
717 * @opflclass arithmetic
718 * @ophints harmless ignores_op_sizes
719 * @opstats add_Eb_Gb
720 * @opgroup og_gen_arith_bin
721 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
722 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
723 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
724 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
725 */
726FNIEMOP_DEF(iemOp_add_Eb_Gb)
727{
728 IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
729 IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_add_u8, iemAImpl_add_u8_locked);
730}
731
732
733/**
734 * @opcode 0x01
735 * @opgroup og_gen_arith_bin
736 * @opflclass arithmetic
737 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
738 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
739 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
740 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
741 */
742FNIEMOP_DEF(iemOp_add_Ev_Gv)
743{
744 IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
745 IEMOP_BODY_BINARY_rm_rv_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
746 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
747}
748
749
750/**
751 * @opcode 0x02
752 * @opgroup og_gen_arith_bin
753 * @opflclass arithmetic
754 * @opcopytests iemOp_add_Eb_Gb
755 */
756FNIEMOP_DEF(iemOp_add_Gb_Eb)
757{
758 IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
759 IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
760}
761
762
763/**
764 * @opcode 0x03
765 * @opgroup og_gen_arith_bin
766 * @opflclass arithmetic
767 * @opcopytests iemOp_add_Ev_Gv
768 */
769FNIEMOP_DEF(iemOp_add_Gv_Ev)
770{
771 IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
772 IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1, 0);
773}
774
775
776/**
777 * @opcode 0x04
778 * @opgroup og_gen_arith_bin
779 * @opflclass arithmetic
780 * @opcopytests iemOp_add_Eb_Gb
781 */
782FNIEMOP_DEF(iemOp_add_Al_Ib)
783{
784 IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
785 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
786}
787
788
789/**
790 * @opcode 0x05
791 * @opgroup og_gen_arith_bin
792 * @opflclass arithmetic
793 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
794 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
795 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
796 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
797 */
798FNIEMOP_DEF(iemOp_add_eAX_Iz)
799{
800 IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
801 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
802}
803
804
805/**
806 * @opcode 0x06
807 * @opgroup og_stack_sreg
808 */
809FNIEMOP_DEF(iemOp_push_ES)
810{
811 IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
812 IEMOP_HLP_NO_64BIT();
813 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
814}
815
816
817/**
818 * @opcode 0x07
819 * @opgroup og_stack_sreg
820 */
821FNIEMOP_DEF(iemOp_pop_ES)
822{
823 IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
824 IEMOP_HLP_NO_64BIT();
825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
826 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
827 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
828 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
829 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
830 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
831 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES),
832 iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
833}
834
835
836/**
837 * @opcode 0x08
838 * @opgroup og_gen_arith_bin
839 * @opflclass logical
840 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
841 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
842 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
843 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
844 */
845FNIEMOP_DEF(iemOp_or_Eb_Gb)
846{
847 IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
848 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
849 IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_or_u8, iemAImpl_or_u8_locked);
850}
851
852
853/*
854 * @opcode 0x09
855 * @opgroup og_gen_arith_bin
856 * @opflclass logical
857 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
858 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
859 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
860 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
861 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
862 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
863 * @note AF is documented as undefined, but both modern AMD and Intel CPUs clears it.
864 */
865FNIEMOP_DEF(iemOp_or_Ev_Gv)
866{
867 IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
868 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
869 IEMOP_BODY_BINARY_rm_rv_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
870 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
871}
872
873
874/**
875 * @opcode 0x0a
876 * @opgroup og_gen_arith_bin
877 * @opflclass logical
878 * @opcopytests iemOp_or_Eb_Gb
879 */
880FNIEMOP_DEF(iemOp_or_Gb_Eb)
881{
882 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
883 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
884 IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
885}
886
887
888/**
889 * @opcode 0x0b
890 * @opgroup og_gen_arith_bin
891 * @opflclass logical
892 * @opcopytests iemOp_or_Ev_Gv
893 */
894FNIEMOP_DEF(iemOp_or_Gv_Ev)
895{
896 IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
897 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
898 IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1, 0);
899}
900
901
902/**
903 * @opcode 0x0c
904 * @opgroup og_gen_arith_bin
905 * @opflclass logical
906 * @opcopytests iemOp_or_Eb_Gb
907 */
908FNIEMOP_DEF(iemOp_or_Al_Ib)
909{
910 IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
911 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
912 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
913}
914
915
916/**
917 * @opcode 0x0d
918 * @opgroup og_gen_arith_bin
919 * @opflclass logical
920 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
921 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
922 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
923 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
924 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
925 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
926 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
927 */
928FNIEMOP_DEF(iemOp_or_eAX_Iz)
929{
930 IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
931 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
932 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
933}
934
935
936/**
937 * @opcode 0x0e
938 * @opgroup og_stack_sreg
939 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS via the common segment-register push helper; invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
946
947
948/**
949 * @opcode 0x0f
950 * @opmnemonic EscTwo0f
951 * @openc two0f
952 * @opdisenum OP_2B_ESC
953 * @ophints harmless
954 * @opgroup og_escapes
955 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    /* 286 and later: fetch the second opcode byte and dispatch via the
       two-byte map (4 entries per opcode, selected by the prefix index). */
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_CS),
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
993
994/**
995 * @opcode 0x10
996 * @opgroup og_gen_arith_bin
997 * @opflclass arithmetic_carry
998 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
999 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1000 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1001 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1002 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1003 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC Eb,Gb: destination may be memory, so a locked worker covers the LOCK prefix. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_adc_u8, iemAImpl_adc_u8_locked);
}
1009
1010
1011/**
1012 * @opcode 0x11
1013 * @opgroup og_gen_arith_bin
1014 * @opflclass arithmetic_carry
1015 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1016 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1017 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1018 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1019 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1020 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC Ev,Gv: normal and LOCK-prefixed 16/32/64-bit workers. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1027
1028
1029/**
1030 * @opcode 0x12
1031 * @opgroup og_gen_arith_bin
1032 * @opflclass arithmetic_carry
1033 * @opcopytests iemOp_adc_Eb_Gb
1034 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC Gb,Eb: register destination, so no locked variant is needed. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
1040
1041
1042/**
1043 * @opcode 0x13
1044 * @opgroup og_gen_arith_bin
1045 * @opflclass arithmetic_carry
1046 * @opcopytests iemOp_adc_Ev_Gv
1047 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC Gv,Ev: register destination; dispatches to the 16/32/64-bit workers. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1, 0);
}
1053
1054
1055/**
1056 * @opcode 0x14
1057 * @opgroup og_gen_arith_bin
1058 * @opflclass arithmetic_carry
1059 * @opcopytests iemOp_adc_Eb_Gb
1060 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL,Ib: fixed 8-bit AL destination, operand-size prefixes irrelevant. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
1066
1067
1068/**
1069 * @opcode 0x15
1070 * @opgroup og_gen_arith_bin
1071 * @opflclass arithmetic_carry
1072 * @opcopytests iemOp_adc_Ev_Gv
1073 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX,Iz: immediate width follows the effective operand size. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1079
1080
1081/**
1082 * @opcode 0x16
1083 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS via the common segment-register push helper; invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1090
1091
1092/**
1093 * @opcode 0x17
1094 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS: defers to iemCImpl_pop_Sreg.  Modifies rSP plus the SS selector
       and all hidden SS fields (base/limit/attribs); IEM_CIMPL_F_INHIBIT_SHADOW
       flags the interrupt-shadow effect on the following instruction. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_SS),
                                iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1108
1109
1110/**
1111 * @opcode 0x18
1112 * @opgroup og_gen_arith_bin
1113 * @opflclass arithmetic_carry
1114 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB Eb,Gb: destination may be memory, so a locked worker covers the LOCK prefix. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_sbb_u8, iemAImpl_sbb_u8_locked);
}
1120
1121
1122/**
1123 * @opcode 0x19
1124 * @opgroup og_gen_arith_bin
1125 * @opflclass arithmetic_carry
1126 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB Ev,Gv: normal and LOCK-prefixed 16/32/64-bit workers. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1133
1134
1135/**
1136 * @opcode 0x1a
1137 * @opgroup og_gen_arith_bin
1138 * @opflclass arithmetic_carry
1139 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB Gb,Eb: register destination, so no locked variant is needed. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
1145
1146
1147/**
1148 * @opcode 0x1b
1149 * @opgroup og_gen_arith_bin
1150 * @opflclass arithmetic_carry
1151 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB Gv,Ev: register destination; dispatches to the 16/32/64-bit workers. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1, 0);
}
1157
1158
1159/**
1160 * @opcode 0x1c
1161 * @opgroup og_gen_arith_bin
1162 * @opflclass arithmetic_carry
1163 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL,Ib: fixed 8-bit AL destination, operand-size prefixes irrelevant. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
1169
1170
1171/**
1172 * @opcode 0x1d
1173 * @opgroup og_gen_arith_bin
1174 * @opflclass arithmetic_carry
1175 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX,Iz: immediate width follows the effective operand size. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1181
1182
1183/**
1184 * @opcode 0x1e
1185 * @opgroup og_stack_sreg
1186 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS via the common segment-register push helper; invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1193
1194
1195/**
1196 * @opcode 0x1f
1197 * @opgroup og_stack_sreg
1198 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS: defers to iemCImpl_pop_Sreg.  Modifies rSP plus the DS selector
       and all hidden DS fields (base/limit/attribs). */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS),
                                iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1212
1213
1214/**
1215 * @opcode 0x20
1216 * @opgroup og_gen_arith_bin
1217 * @opflclass logical
1218 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND Eb,Gb: destination may be memory, so a locked worker covers the LOCK prefix. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_and_u8, iemAImpl_and_u8_locked);
}
1225
1226
1227/**
1228 * @opcode 0x21
1229 * @opgroup og_gen_arith_bin
1230 * @opflclass logical
1231 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND Ev,Gv: normal and LOCK-prefixed 16/32/64-bit workers. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1239
1240
1241/**
1242 * @opcode 0x22
1243 * @opgroup og_gen_arith_bin
1244 * @opflclass logical
1245 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND Gb,Eb: register destination, so no locked variant is needed. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1252
1253
1254/**
1255 * @opcode 0x23
1256 * @opgroup og_gen_arith_bin
1257 * @opflclass logical
1258 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND Gv,Ev: register destination; dispatches to the 16/32/64-bit workers. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1, 0);
}
1265
1266
1267/**
1268 * @opcode 0x24
1269 * @opgroup og_gen_arith_bin
1270 * @opflclass logical
1271 */
1272FNIEMOP_DEF(iemOp_and_Al_Ib)
1273{
1274 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1275 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1276 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1277}
1278
1279
1280/**
1281 * @opcode 0x25
1282 * @opgroup og_gen_arith_bin
1283 * @opflclass logical
1284 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX,Iz: immediate width follows the effective operand size. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1291
1292
1293/**
1294 * @opcode 0x26
1295 * @opmnemonic SEG
1296 * @op1 ES
1297 * @opgroup og_prefix
1298 * @openc prefix
1299 * @opdisenum OP_SEG
1300 * @ophints harmless
1301 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record it and re-dispatch on the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg    = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1311
1312
1313/**
1314 * @opcode 0x27
1315 * @opfltest af,cf
1316 * @opflmodify cf,pf,af,zf,sf,of
1317 * @opflundef of
1318 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA: decimal adjust AL after addition, deferred to iemCImpl_daa; invalid in 64-bit mode. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_daa);
}
1327
1328
1329/**
1330 * @opcode 0x28
1331 * @opgroup og_gen_arith_bin
1332 * @opflclass arithmetic
1333 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB Eb,Gb: destination may be memory, so a locked worker covers the LOCK prefix. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_sub_u8, iemAImpl_sub_u8_locked);
}
1339
1340
1341/**
1342 * @opcode 0x29
1343 * @opgroup og_gen_arith_bin
1344 * @opflclass arithmetic
1345 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB Ev,Gv: normal and LOCK-prefixed 16/32/64-bit workers. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1352
1353
1354/**
1355 * @opcode 0x2a
1356 * @opgroup og_gen_arith_bin
1357 * @opflclass arithmetic
1358 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB Gb,Eb: register destination, so no locked variant is needed. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1364
1365
1366/**
1367 * @opcode 0x2b
1368 * @opgroup og_gen_arith_bin
1369 * @opflclass arithmetic
1370 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB Gv,Ev: register destination; dispatches to the 16/32/64-bit workers. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1, 0);
}
1376
1377
1378/**
1379 * @opcode 0x2c
1380 * @opgroup og_gen_arith_bin
1381 * @opflclass arithmetic
1382 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL,Ib: fixed 8-bit AL destination, operand-size prefixes irrelevant. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1388
1389
1390/**
1391 * @opcode 0x2d
1392 * @opgroup og_gen_arith_bin
1393 * @opflclass arithmetic
1394 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX,Iz: immediate width follows the effective operand size. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1400
1401
1402/**
1403 * @opcode 0x2e
1404 * @opmnemonic SEG
1405 * @op1 CS
1406 * @opgroup og_prefix
1407 * @openc prefix
1408 * @opdisenum OP_SEG
1409 * @ophints harmless
1410 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record it and re-dispatch on the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1420
1421
1422/**
1423 * @opcode 0x2f
1424 * @opfltest af,cf
1425 * @opflmodify cf,pf,af,zf,sf,of
1426 * @opflundef of
1427 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS: decimal adjust AL after subtraction, deferred to iemCImpl_das; invalid in 64-bit mode. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_das);
}
1436
1437
1438/**
1439 * @opcode 0x30
1440 * @opgroup og_gen_arith_bin
1441 * @opflclass logical
1442 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR Eb,Gb: destination may be memory, so a locked worker covers the LOCK prefix. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_xor_u8, iemAImpl_xor_u8_locked);
}
1449
1450
1451/**
1452 * @opcode 0x31
1453 * @opgroup og_gen_arith_bin
1454 * @opflclass logical
1455 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR Ev,Gv: normal and LOCK-prefixed 16/32/64-bit workers. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1463
1464
1465/**
1466 * @opcode 0x32
1467 * @opgroup og_gen_arith_bin
1468 * @opflclass logical
1469 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR Gb,Eb: register destination, so no locked variant is needed. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1476
1477
1478/**
1479 * @opcode 0x33
1480 * @opgroup og_gen_arith_bin
1481 * @opflclass logical
1482 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR Gv,Ev: register destination; dispatches to the 16/32/64-bit workers. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1, 0);
}
1489
1490
1491/**
1492 * @opcode 0x34
1493 * @opgroup og_gen_arith_bin
1494 * @opflclass logical
1495 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL,Ib: fixed 8-bit AL destination, operand-size prefixes irrelevant. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1502
1503
1504/**
1505 * @opcode 0x35
1506 * @opgroup og_gen_arith_bin
1507 * @opflclass logical
1508 */
1509FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1510{
1511 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1512 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1513 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1514}
1515
1516
1517/**
1518 * @opcode 0x36
1519 * @opmnemonic SEG
1520 * @op1 SS
1521 * @opgroup og_prefix
1522 * @openc prefix
1523 * @opdisenum OP_SEG
1524 * @ophints harmless
1525 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record it and re-dispatch on the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1535
1536
1537/**
1538 * @opcode 0x37
1539 * @opfltest af
1540 * @opflmodify cf,pf,af,zf,sf,of
1541 * @opflundef pf,zf,sf,of
1542 * @opgroup og_gen_arith_dec
1543 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1544 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1545 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1546 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1547 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1548 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1549 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1550 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1551 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1552 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1553 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1554 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1555 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1556 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1557 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1558 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1559 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1560 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1561 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1562 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1563 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1564 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1565 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1566 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1567 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1568 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1569 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1570 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1571 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1572 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1573 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1574 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA: ASCII adjust AL/AX after addition, deferred to iemCImpl_aaa; invalid in 64-bit mode. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aaa);
}
1584
1585
1586/**
1587 * @opcode 0x38
1588 * @opflclass arithmetic
1589 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP only reads its operands, hence the read-only (RO) body and no locked worker. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8);
}
1595
1596
1597/**
1598 * @opcode 0x39
1599 * @opflclass arithmetic
1600 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP only reads its operands, hence the read-only (RO) body and no locked worker. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
1606
1607
1608/**
1609 * @opcode 0x3a
1610 * @opflclass arithmetic
1611 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP Gb,Eb: register first operand; only flags are produced. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1617
1618
1619/**
1620 * @opcode 0x3b
1621 * @opflclass arithmetic
1622 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP Gv,Ev: note the 0 where the writeback-style forms pass 1 (compare iemOp_sub_Gv_Ev). */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0, 0);
}
1628
1629
1630/**
1631 * @opcode 0x3c
1632 * @opflclass arithmetic
1633 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL,Ib: fixed 8-bit AL operand compared with an immediate. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1639
1640
1641/**
1642 * @opcode 0x3d
1643 * @opflclass arithmetic
1644 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX,Iz: note the trailing 0 where the writeback forms pass 1 (compare iemOp_sub_eAX_Iz). */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1650
1651
1652/**
1653 * @opcode 0x3e
1654 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record it and re-dispatch on the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1664
1665
1666/**
1667 * @opcode 0x3f
1668 * @opfltest af
1669 * @opflmodify cf,pf,af,zf,sf,of
1670 * @opflundef pf,zf,sf,of
1671 * @opgroup og_gen_arith_dec
1672 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1673 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1674 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1675 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1676 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1677 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1678 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1679 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1680 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1681 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1682 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1683 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1684 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1685 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1686 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1687 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1688 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1689 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1690 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1691 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1692 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1693 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1694 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1695 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1696 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1697 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1698 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1699 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1700 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1701 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1702 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1703 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1704 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1705 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1706 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1707 */
1708FNIEMOP_DEF(iemOp_aas)
1709{
1710 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1711 IEMOP_HLP_NO_64BIT();
1712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1713 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1714
1715 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aas);
1716}
1717
1718
1719/**
1720 * Common 'inc/dec register' helper.
1721 *
1722 * Not for 64-bit code, only for what became the rex prefixes.
1723 */
1724#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
1725 switch (pVCpu->iem.s.enmEffOpSize) \
1726 { \
1727 case IEMMODE_16BIT: \
1728 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_64BIT, 0); \
1729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1730 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
1731 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1732 IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
1733 IEM_MC_REF_EFLAGS(pEFlags); \
1734 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
1735 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1736 IEM_MC_END(); \
1737 break; \
1738 \
1739 case IEMMODE_32BIT: \
1740 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
1741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1742 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
1743 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1744 IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
1745 IEM_MC_REF_EFLAGS(pEFlags); \
1746 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
1747 IEM_MC_CLEAR_HIGH_GREG_U64(a_iReg); \
1748 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1749 IEM_MC_END(); \
1750 break; \
1751 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1752 } \
1753 (void)0
1754
1755/**
1756 * @opcode 0x40
1757 * @opflclass incdec
1758 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.  (0x40 carries no R/X/B/W bits,
     * so only the REX-seen prefix flag is recorded before re-dispatching.)
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1776
1777
1778/**
1779 * @opcode 0x41
1780 * @opflclass incdec
1781 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.  (0x41 = REX.B, extending the
     * ModRM r/m or opcode register field by bit 3.)
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1800
1801
1802/**
1803 * @opcode 0x42
1804 * @opflclass incdec
1805 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.  (0x42 = REX.X, extending the
     * SIB index register field by bit 3.)
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1824
1825
1826
1827/**
1828 * @opcode 0x43
1829 * @opflclass incdec
1830 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.  (0x43 = REX.XB.)
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1850
1851
1852/**
1853 * @opcode 0x44
1854 * @opflclass incdec
1855 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.  (0x44 = REX.R, extending the
     * ModRM reg field by bit 3.)
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1874
1875
1876/**
1877 * @opcode 0x45
1878 * @opflclass incdec
1879 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.  (0x45 = REX.RB.)
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1899
1900
1901/**
1902 * @opcode 0x46
1903 * @opflclass incdec
1904 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.  (0x46 = REX.RX.)
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
1924
1925
1926/**
1927 * @opcode 0x47
1928 * @opflclass incdec
1929 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.  (0x47 = REX.RXB.)
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
1950
1951
/**
 * @opcode 0x48
 * @opflclass incdec
 * @note In 64-bit mode this opcode byte is the REX.W prefix, not 'dec eAX'.
 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        /* Fetch and dispatch the instruction following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
}
1974
1975
/**
 * @opcode 0x49
 * @opflclass incdec
 * @note In 64-bit mode this opcode byte is the REX.WB prefix, not 'dec eCX'.
 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;    /* REX.B: ModRM.rm/SIB.base selects r8-r15. */
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        /* Fetch and dispatch the instruction following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
}
1999
2000
/**
 * @opcode 0x4a
 * @opflclass incdec
 * @note In 64-bit mode this opcode byte is the REX.WX prefix, not 'dec eDX'.
 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;    /* REX.X: SIB.index selects r8-r15. */
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        /* Fetch and dispatch the instruction following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
}
2024
2025
/**
 * @opcode 0x4b
 * @opflclass incdec
 * @note In 64-bit mode this opcode byte is the REX.WXB prefix, not 'dec eBX'.
 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;    /* REX.B: ModRM.rm/SIB.base selects r8-r15. */
        pVCpu->iem.s.uRexIndex = 1 << 3;    /* REX.X: SIB.index selects r8-r15. */
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        /* Fetch and dispatch the instruction following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
}
2050
2051
/**
 * @opcode 0x4c
 * @opflclass incdec
 * @note In 64-bit mode this opcode byte is the REX.WR prefix, not 'dec eSP'.
 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;    /* REX.R: ModRM.reg selects r8-r15. */
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        /* Fetch and dispatch the instruction following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
}
2075
2076
/**
 * @opcode 0x4d
 * @opflclass incdec
 * @note In 64-bit mode this opcode byte is the REX.WRB prefix, not 'dec eBP'.
 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;    /* REX.R: ModRM.reg selects r8-r15. */
        pVCpu->iem.s.uRexB     = 1 << 3;    /* REX.B: ModRM.rm/SIB.base selects r8-r15. */
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        /* Fetch and dispatch the instruction following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
}
2101
2102
/**
 * @opcode 0x4e
 * @opflclass incdec
 * @note In 64-bit mode this opcode byte is the REX.WRX prefix, not 'dec eSI'.
 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;    /* REX.R: ModRM.reg selects r8-r15. */
        pVCpu->iem.s.uRexIndex = 1 << 3;    /* REX.X: SIB.index selects r8-r15. */
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        /* Fetch and dispatch the instruction following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
}
2127
2128
/**
 * @opcode 0x4f
 * @opflclass incdec
 * @note In 64-bit mode this opcode byte is the REX.WRXB prefix, not 'dec eDI'.
 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;    /* REX.R: ModRM.reg selects r8-r15. */
        pVCpu->iem.s.uRexB     = 1 << 3;    /* REX.B: ModRM.rm/SIB.base selects r8-r15. */
        pVCpu->iem.s.uRexIndex = 1 << 3;    /* REX.X: SIB.index selects r8-r15. */
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        /* Fetch and dispatch the instruction following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
}
2154
2155
/**
 * Common 'push register' helper.
 *
 * @param   iReg    The general register index (X86_GREG_XXX) of the register
 *                  to push; extended with REX.B in 64-bit mode.
 *
 * In 64-bit mode PUSH defaults to a 64-bit operand; the 0x66 prefix selects
 * 16-bit (there is no 32-bit push in 64-bit mode).
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2203
2204
/**
 * @opcode 0x50
 * @note With REX.B this pushes r8 instead (handled by the common worker).
 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
2213
2214
/**
 * @opcode 0x51
 * @note With REX.B this pushes r9 instead (handled by the common worker).
 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
2223
2224
/**
 * @opcode 0x52
 * @note With REX.B this pushes r10 instead (handled by the common worker).
 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
2233
2234
/**
 * @opcode 0x53
 * @note With REX.B this pushes r11 instead (handled by the common worker).
 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
2243
2244
/**
 * @opcode 0x54
 * @note The 8086 pushes the already-decremented SP value; 80186 and later
 *       push the pre-decrement value, hence the special path below.
 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) != IEMTARGETCPU_8086)
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);

    /* 8086 works differently wrt to 'push sp' compared to 80186 and later. */
    IEM_MC_BEGIN(0, 1, IEM_MC_F_ONLY_8086, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Value);
    IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
    IEM_MC_SUB_LOCAL_U16(u16Value, 2); /* Emulate the 8086 storing SP-2. */
    IEM_MC_PUSH_U16(u16Value);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
2264
2265
/**
 * @opcode 0x55
 * @note With REX.B this pushes r13 instead (handled by the common worker).
 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
2274
2275
/**
 * @opcode 0x56
 * @note With REX.B this pushes r14 instead (handled by the common worker).
 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
2284
2285
/**
 * @opcode 0x57
 * @note With REX.B this pushes r15 instead (handled by the common worker).
 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
2294
2295
/**
 * Common 'pop register' helper.
 *
 * @param   iReg    The general register index (X86_GREG_XXX) of the
 *                  destination register; extended with REX.B in 64-bit mode.
 *
 * In 64-bit mode POP defaults to a 64-bit operand; the 0x66 prefix selects
 * 16-bit (there is no 32-bit pop in 64-bit mode).  Any special 'pop sp/rsp'
 * semantics are handled inside the IEM_MC_POP_GREG_* micro-ops.
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U16(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U32(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U64(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2337
2338
/**
 * @opcode 0x58
 * @note With REX.B this pops into r8 instead (handled by the common worker).
 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
2347
2348
/**
 * @opcode 0x59
 * @note With REX.B this pops into r9 instead (handled by the common worker).
 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
2357
2358
/**
 * @opcode 0x5a
 * @note With REX.B this pops into r10 instead (handled by the common worker).
 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
2367
2368
/**
 * @opcode 0x5b
 * @note With REX.B this pops into r11 instead (handled by the common worker).
 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
2377
2378
/**
 * @opcode 0x5c
 * @note The 'pop sp/esp/rsp' special case (SP is incremented before the
 *       store) is handled inside the IEM_MC_POP_GREG_* micro-ops used by
 *       the common worker.
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
}
2387
2388
/**
 * @opcode 0x5d
 * @note With REX.B this pops into r13 instead (handled by the common worker).
 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
2397
2398
/**
 * @opcode 0x5e
 * @note With REX.B this pops into r14 instead (handled by the common worker).
 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
2407
2408
/**
 * @opcode 0x5f
 * @note With REX.B this pops into r15 instead (handled by the common worker).
 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
2417
2418
/**
 * @opcode 0x60
 * @note Invalid in 64-bit mode; deferred to a C implementation which pushes
 *       all eight GPRs.  Only xSP is listed as modified since the other
 *       registers are merely read.
 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_32);
}
2432
2433
/**
 * @opcode 0x61
 * @note POPA outside 64-bit mode; in 64-bit mode this opcode byte is the
 *       (unsupported) MVEX prefix.  All eight GPRs are listed as modified
 *       since POPA writes every one of them.
 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                          RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                        iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                      RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                    iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
2471
2472
2473/**
2474 * @opcode 0x62
2475 * @opmnemonic bound
2476 * @op1 Gv_RO
2477 * @op2 Ma
2478 * @opmincpu 80186
2479 * @ophints harmless x86_invalid_64
2480 * @optest op1=0 op2=0 ->
2481 * @optest op1=1 op2=0 -> value.xcpt=5
2482 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2483 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2484 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2485 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2486 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2487 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2488 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2489 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2490 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2491 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2492 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2493 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2494 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2495 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2496 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2497 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2498 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2499 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2500 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2501 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2502 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2503 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2504 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2505 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2506 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2507 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2508 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2509 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2510 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2511 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2512 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2513 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2514 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2515 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2516 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2517 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2518 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2519 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2520 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2521 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2522 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2523 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2524 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set.  In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X.  Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /** @todo testcase: check that there are two memory accesses involved.  Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint16_t,    u16Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t,    u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t,    u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Index comes from the register; the two bounds are adjacent words in memory. */
                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint32_t,    u32Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t,    u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t,    u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Index comes from the register; the two bounds are adjacent dwords in memory. */
                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX prefix handling: consume the remaining two payload bytes, then give up. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2612
2613
/**
 * @opcode 0x63
 * @opflmodify zf
 * @note non-64-bit modes.
 * @note MR form: the destination is Ew (ModRM.rm), the source RPL comes from
 *       Gw (ModRM.reg) — the reverse of the usual Gv,Ev operand order.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
        IEM_MC_ARG(uint16_t,        u16Src,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory */
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEM_MC_ARG(uint16_t *,  pu16Dst,          0);
        IEM_MC_ARG(uint16_t,    u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,  2);
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,   bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        /* Map the destination word read/write so the helper can update it in place. */
        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2666
2667
/**
 * @opcode 0x63
 *
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register.
             */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            /* Sign-extend the 32-bit source (ModRM.rm) into the 64-bit dest (ModRM.reg). */
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory.
             */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        /* The non-REX.W 'plain move' form is not implemented yet (see @note above). */
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
2715
2716
/**
 * @opcode 0x64
 * @opmnemonic segfs
 * @opmincpu 80386
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    /* Record the override and make FS the effective segment for the next instruction. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2734
2735
/**
 * @opcode 0x65
 * @opmnemonic seggs
 * @opmincpu 80386
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    /* Record the override and make GS the effective segment for the next instruction. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2753
2754
/**
 * @opcode 0x66
 * @opmnemonic opsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2779
2780
/**
 * @opcode 0x67
 * @opmnemonic addrsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    /* Toggle to the alternative address size: 16<->32 outside long mode,
       64->32 in long mode (there is no 16-bit addressing there). */
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2806
2807
/**
 * @opcode 0x68
 * @note Pushes an operand-size immediate; in 64-bit mode the 32-bit
 *       immediate is sign-extended to 64 bits.
 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint16_t, u16Value, u16Imm);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint32_t, u32Value, u32Imm);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint64_t, u64Value, u64Imm);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2851
2852
/**
 * @opcode 0x69
 * @opflclass multiply
 * @note Three-operand IMUL: Gv = Ev * Iz.  The destination register is only
 *       written after the helper runs; the helper updates EFLAGS.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t,  u16Tmp);
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_ARG_LOCAL_REF(uint16_t *,        pu16Dst, u16Tmp,    0);
                IEM_MC_ARG_CONST(uint16_t,              u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *,                  pEFlags,            2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = imm16 still to come after ModRM bytes. */

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t,  u16Tmp);
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint16_t *,        pu16Dst, u16Tmp,    0);
                IEM_MC_ARG_CONST(uint16_t,              u16Src, u16Imm,     1);
                IEM_MC_ARG(uint32_t *,                  pEFlags,            2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t,  u32Tmp);
                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint32_t *,        pu32Dst, u32Tmp,    0);
                IEM_MC_ARG_CONST(uint32_t,              u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *,                  pEFlags,            2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = imm32 still to come after ModRM bytes. */

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint32_t,  u32Tmp);
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint32_t *,        pu32Dst, u32Tmp,    0);
                IEM_MC_ARG_CONST(uint32_t,              u32Src, u32Imm,     1);
                IEM_MC_ARG(uint32_t *,                  pEFlags,            2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended to 64 bits. */
                IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t,  u64Tmp);
                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint64_t *,        pu64Dst, u64Tmp,    0);
                IEM_MC_ARG_CONST(uint64_t,              u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *,                  pEFlags,            2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = imm32 still to come after ModRM bytes. */

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();          /* parameter count for the threaded function for this block. */

                IEM_MC_LOCAL(uint64_t,  u64Tmp);
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint64_t *,        pu64Dst, u64Tmp,                        0);
                IEM_MC_ARG_CONST(uint64_t,              u64Src, /*=*/ (int64_t)(int32_t)u32Imm, 1);
                IEM_MC_ARG(uint32_t *,                  pEFlags,                                2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3012
3013
/**
 * @opcode 0x6a
 *
 * PUSH Ib: pushes a sign-extended byte immediate with the effective
 * operand size (16/32/64-bit).  Not encodable before the 186.
 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* in 64-bit mode the operand size defaults to 64-bit for pushes */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_186, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint16_t, uValue, (int16_t)i8Imm); /* sign-extend i8 -> 16-bit */
            IEM_MC_PUSH_U16(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint32_t, uValue, (int32_t)i8Imm); /* sign-extend i8 -> 32-bit */
            IEM_MC_PUSH_U32(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint64_t, uValue, (int64_t)i8Imm); /* sign-extend i8 -> 64-bit */
            IEM_MC_PUSH_U64(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3053
3054
/**
 * @opcode 0x6b
 * @opflclass multiply
 *
 * IMUL Gv,Ev,Ib: three-operand signed multiply with a sign-extended byte
 * immediate.  The Ev operand (register or memory) is multiplied by the
 * immediate and the truncated result is stored in the Gv register.
 * Note the mnemonic comment below says Iz; the encoding here is Ib
 * (sign-extended to the operand size).
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* SF/ZF/AF/PF are architecturally undefined after IMUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            /* EFLAGS behavior differs between CPU vendors; select the right worker. */
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint16_t *,  pu16Dst, u16Tmp,  0);
                IEM_MC_ARG_CONST(uint16_t,        u16Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extended imm8 */
                IEM_MC_ARG(uint32_t *,            pEFlags,          2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); /* result goes to Gv (reg field) */

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);

                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = byte immediate follows the modrm bytes */

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint16_t *,  pu16Dst, u16Tmp,  0);
                IEM_MC_ARG_CONST(uint16_t,        u16Src,  u16Imm,  1);
                IEM_MC_ARG(uint32_t *,            pEFlags,          2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint32_t *,  pu32Dst, u32Tmp,  0);
                IEM_MC_ARG_CONST(uint32_t,        u32Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extended imm8 */
                IEM_MC_ARG(uint32_t *,            pEFlags,          2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = byte immediate follows */

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint32_t *,  pu32Dst, u32Tmp,  0);
                IEM_MC_ARG_CONST(uint32_t,        u32Src,  u32Imm,  1);
                IEM_MC_ARG(uint32_t *,            pEFlags,          2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint64_t *,  pu64Dst, u64Tmp,  0);
                IEM_MC_ARG_CONST(uint64_t,        u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1); /* sign-extended imm8 */
                IEM_MC_ARG(uint32_t *,            pEFlags,          2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = byte immediate follows */

                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the threaded parameter count. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint64_t *,  pu64Dst, u64Tmp,  0);
                IEM_MC_ARG_CONST(uint64_t,        u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1); /* sign-extend here instead */
                IEM_MC_ARG(uint32_t *,            pEFlags,          2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3217
3218
/**
 * @opcode 0x6c
 * @opfltest iopl,df
 *
 * INS Yb,DX: byte input from port DX to ES:[e/rDI].  Both the plain and the
 * REP-prefixed forms are deferred to C implementations, selected by the
 * effective address mode.  The REP forms additionally clobber xCX.
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ)) /* REP and REPNE are treated alike here */
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3271
3272
/**
 * @opcode 0x6d
 * @opfltest iopl,df
 *
 * INS Yv,DX: word/dword input from port DX.  Dispatches on effective
 * operand size (64-bit falls back to the 32-bit workers, as INS has no
 * 64-bit operand form) and then on effective address mode.
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* intentional fallthru: 64-bit operand size uses the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* intentional fallthru: 64-bit operand size uses the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3378
3379
/**
 * @opcode 0x6e
 * @opfltest iopl,df
 *
 * OUTS DX,Yb: byte output from iEffSeg:[e/rSI] to port DX.  Deferred to C
 * implementations selected by effective address mode; the effective segment
 * is passed along since OUTS honours segment overrides.  REP forms also
 * clobber xCX.
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ)) /* REP and REPNE are treated alike here */
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3432
3433
/**
 * @opcode 0x6f
 * @opfltest iopl,df
 *
 * OUTS DX,Yv: word/dword output to port DX.  Dispatches on effective
 * operand size (64-bit reuses the 32-bit workers, as OUTS has no 64-bit
 * operand form) and then on effective address mode.  Segment overrides are
 * honoured via iEffSeg.
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* intentional fallthru: 64-bit operand size uses the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* intentional fallthru: 64-bit operand size uses the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3539
3540
/**
 * @opcode 0x70
 * @opfltest of
 *
 * JO Jb: short jump taken when OF=1.
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3560
3561
/**
 * @opcode 0x71
 * @opfltest of
 *
 * JNO Jb: short jump taken when OF=0 (branches are inverted vs. JO).
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();   /* OF=1: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* OF=0: take the jump */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3581
/**
 * @opcode 0x72
 * @opfltest cf
 *
 * JC/JB/JNAE Jb: short jump taken when CF=1.
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3601
3602
/**
 * @opcode 0x73
 * @opfltest cf
 *
 * JNC/JNB/JAE Jb: short jump taken when CF=0.
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();   /* CF=1: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* CF=0: take the jump */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3622
3623
/**
 * @opcode 0x74
 * @opfltest zf
 *
 * JE/JZ Jb: short jump taken when ZF=1.
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3643
3644
/**
 * @opcode 0x75
 * @opfltest zf
 *
 * JNE/JNZ Jb: short jump taken when ZF=0.
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();   /* ZF=1: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* ZF=0: take the jump */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3664
3665
/**
 * @opcode 0x76
 * @opfltest cf,zf
 *
 * JBE/JNA Jb: short jump taken when CF=1 or ZF=1 (unsigned below-or-equal).
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3685
3686
/**
 * @opcode 0x77
 * @opfltest cf,zf
 *
 * JA/JNBE Jb: short jump taken when CF=0 and ZF=0 (unsigned above).
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();   /* CF or ZF set: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* both clear: take the jump */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3706
3707
/**
 * @opcode 0x78
 * @opfltest sf
 *
 * JS Jb: short jump taken when SF=1.
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3727
3728
/**
 * @opcode 0x79
 * @opfltest sf
 *
 * JNS Jb: short jump taken when SF=0.
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();   /* SF=1: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* SF=0: take the jump */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3748
3749
/**
 * @opcode 0x7a
 * @opfltest pf
 *
 * JP/JPE Jb: short jump taken when PF=1.
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3769
3770
/**
 * @opcode 0x7b
 * @opfltest pf
 *
 * JNP/JPO Jb: short jump taken when PF=0.
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();   /* PF=1: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* PF=0: take the jump */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3790
3791
/**
 * @opcode 0x7c
 * @opfltest sf,of
 *
 * JL/JNGE Jb: short jump taken when SF != OF (signed less-than).
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3811
3812
/**
 * @opcode 0x7d
 * @opfltest sf,of
 *
 * JNL/JGE Jb: short jump taken when SF == OF (signed greater-or-equal).
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();   /* SF != OF: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* SF == OF: take the jump */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3832
3833
/**
 * @opcode 0x7e
 * @opfltest zf,sf,of
 *
 * JLE/JNG Jb: short jump taken when ZF=1 or SF != OF (signed less-or-equal).
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3853
3854
/**
 * @opcode 0x7f
 * @opfltest zf,sf,of
 *
 * JG/JNLE Jb: short jump taken when ZF=0 and SF == OF (signed greater).
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();   /* ZF=1 or SF != OF: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* ZF=0 and SF == OF: take the jump */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3874
3875
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Eb_Ib_80.
 *
 * NOTE: This macro is deliberately unbalanced: it ends inside an open
 * else-branch for the locked-memory case, which MUST be completed by a
 * following IEMOP_BODY_BINARY_Eb_Ib_LOCKED() or
 * IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() invocation.
 *
 * Handles the register target directly; for a memory target without a LOCK
 * prefix (or when LOCK is disregarded) it maps the byte read-write,
 * applies the worker and commits both the byte and EFLAGS.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = byte immediate follows */ \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
3926
/**
 * Completes IEMOP_BODY_BINARY_Eb_Ib_RW for the LOCK-prefixed memory case:
 * maps the destination byte atomically and invokes the locked worker.
 * Closes the braces left open by the _RW macro.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = byte immediate follows */ \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
3950
/**
 * Read-only variant of IEMOP_BODY_BINARY_Eb_Ib_RW for workers that only
 * update EFLAGS (e.g. CMP): the memory destination is mapped read-only and
 * never written back.  Like _RW, this macro is deliberately unbalanced and
 * must be followed by IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() (or a _LOCKED
 * body) to close the open else-branch.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = byte immediate follows */ \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
3997
3998#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
3999 IEMOP_HLP_DONE_DECODING(); \
4000 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
4001 } \
4002 } \
4003 (void)0
4004
4005
4006
4007/**
4008 * @opmaps grp1_80,grp1_83
4009 * @opcode /0
4010 * @opflclass arithmetic
4011 */
4012FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
4013{
4014 IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
4015 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_add_u8);
4016 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
4017}
4018
4019
4020/**
4021 * @opmaps grp1_80,grp1_83
4022 * @opcode /1
4023 * @opflclass logical
4024 */
4025FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
4026{
4027 IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
4028 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_or_u8);
4029 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
4030}
4031
4032
4033/**
4034 * @opmaps grp1_80,grp1_83
4035 * @opcode /2
4036 * @opflclass arithmetic_carry
4037 */
4038FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
4039{
4040 IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
4041 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_adc_u8);
4042 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
4043}
4044
4045
4046/**
4047 * @opmaps grp1_80,grp1_83
4048 * @opcode /3
4049 * @opflclass arithmetic_carry
4050 */
4051FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
4052{
4053 IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
4054 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sbb_u8);
4055 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
4056}
4057
4058
4059/**
4060 * @opmaps grp1_80,grp1_83
4061 * @opcode /4
4062 * @opflclass logical
4063 */
4064FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
4065{
4066 IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
4067 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_and_u8);
4068 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
4069}
4070
4071
4072/**
4073 * @opmaps grp1_80,grp1_83
4074 * @opcode /5
4075 * @opflclass arithmetic
4076 */
4077FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
4078{
4079 IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
4080 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sub_u8);
4081 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
4082}
4083
4084
4085/**
4086 * @opmaps grp1_80,grp1_83
4087 * @opcode /6
4088 * @opflclass logical
4089 */
4090FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
4091{
4092 IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
4093 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_xor_u8);
4094 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
4095}
4096
4097
4098/**
4099 * @opmaps grp1_80,grp1_83
4100 * @opcode /7
4101 * @opflclass arithmetic
4102 */
4103FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
4104{
4105 IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
4106 IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
4107 IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
4108}
4109
4110
4111/**
4112 * @opcode 0x80
4113 */
4114FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
4115{
4116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4117 switch (IEM_GET_MODRM_REG_8(bRm))
4118 {
4119 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
4120 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib, bRm);
4121 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
4122 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
4123 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
4124 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
4125 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
4126 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
4127 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4128 }
4129}
4130
4131
4132/**
4133 * Body for a group 1 binary operator.
4134 */
4135#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4136 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4137 { \
4138 /* register target */ \
4139 switch (pVCpu->iem.s.enmEffOpSize) \
4140 { \
4141 case IEMMODE_16BIT: \
4142 { \
4143 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4144 IEM_MC_BEGIN(3, 0, 0, 0); \
4145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4146 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4147 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4148 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4149 \
4150 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4151 IEM_MC_REF_EFLAGS(pEFlags); \
4152 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4153 \
4154 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4155 IEM_MC_END(); \
4156 break; \
4157 } \
4158 \
4159 case IEMMODE_32BIT: \
4160 { \
4161 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4162 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4164 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4165 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4166 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4167 \
4168 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4169 IEM_MC_REF_EFLAGS(pEFlags); \
4170 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4171 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4172 \
4173 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4174 IEM_MC_END(); \
4175 break; \
4176 } \
4177 \
4178 case IEMMODE_64BIT: \
4179 { \
4180 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4181 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4183 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4184 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4185 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4186 \
4187 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4188 IEM_MC_REF_EFLAGS(pEFlags); \
4189 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4190 \
4191 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4192 IEM_MC_END(); \
4193 break; \
4194 } \
4195 \
4196 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4197 } \
4198 } \
4199 else \
4200 { \
4201 /* memory target */ \
4202 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4203 { \
4204 switch (pVCpu->iem.s.enmEffOpSize) \
4205 { \
4206 case IEMMODE_16BIT: \
4207 { \
4208 IEM_MC_BEGIN(3, 3, 0, 0); \
4209 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4210 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4211 \
4212 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4213 IEMOP_HLP_DONE_DECODING(); \
4214 \
4215 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4216 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4217 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4218 \
4219 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4220 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4221 IEM_MC_FETCH_EFLAGS(EFlags); \
4222 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4223 \
4224 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4225 IEM_MC_COMMIT_EFLAGS(EFlags); \
4226 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4227 IEM_MC_END(); \
4228 break; \
4229 } \
4230 \
4231 case IEMMODE_32BIT: \
4232 { \
4233 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4234 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4236 \
4237 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4238 IEMOP_HLP_DONE_DECODING(); \
4239 \
4240 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4241 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4242 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4243 \
4244 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4245 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4246 IEM_MC_FETCH_EFLAGS(EFlags); \
4247 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4248 \
4249 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4250 IEM_MC_COMMIT_EFLAGS(EFlags); \
4251 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4252 IEM_MC_END(); \
4253 break; \
4254 } \
4255 \
4256 case IEMMODE_64BIT: \
4257 { \
4258 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4259 \
4260 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4261 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4262 \
4263 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4264 IEMOP_HLP_DONE_DECODING(); \
4265 \
4266 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4267 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4268 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4269 \
4270 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4271 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4272 IEM_MC_FETCH_EFLAGS(EFlags); \
4273 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4274 \
4275 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4276 IEM_MC_COMMIT_EFLAGS(EFlags); \
4277 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4278 IEM_MC_END(); \
4279 break; \
4280 } \
4281 \
4282 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4283 } \
4284 } \
4285 else \
4286 { \
4287 (void)0
4288/* This must be a separate macro due to parsing restrictions in IEMAllInstPython.py. */
4289#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
4290 switch (pVCpu->iem.s.enmEffOpSize) \
4291 { \
4292 case IEMMODE_16BIT: \
4293 { \
4294 IEM_MC_BEGIN(3, 3, 0, 0); \
4295 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4296 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4297 \
4298 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4299 IEMOP_HLP_DONE_DECODING(); \
4300 \
4301 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4302 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4303 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4304 \
4305 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4306 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4307 IEM_MC_FETCH_EFLAGS(EFlags); \
4308 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
4309 \
4310 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4311 IEM_MC_COMMIT_EFLAGS(EFlags); \
4312 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4313 IEM_MC_END(); \
4314 break; \
4315 } \
4316 \
4317 case IEMMODE_32BIT: \
4318 { \
4319 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4320 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4321 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4322 \
4323 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4324 IEMOP_HLP_DONE_DECODING(); \
4325 \
4326 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4327 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4328 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4329 \
4330 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4331 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4332 IEM_MC_FETCH_EFLAGS(EFlags); \
4333 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
4334 \
4335 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4336 IEM_MC_COMMIT_EFLAGS(EFlags); \
4337 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4338 IEM_MC_END(); \
4339 break; \
4340 } \
4341 \
4342 case IEMMODE_64BIT: \
4343 { \
4344 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4345 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4347 \
4348 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4349 IEMOP_HLP_DONE_DECODING(); \
4350 \
4351 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4352 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4353 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4354 \
4355 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4356 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4357 IEM_MC_FETCH_EFLAGS(EFlags); \
4358 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
4359 \
4360 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4361 IEM_MC_COMMIT_EFLAGS(EFlags); \
4362 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4363 IEM_MC_END(); \
4364 break; \
4365 } \
4366 \
4367 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4368 } \
4369 } \
4370 } \
4371 (void)0
4372
4373/* read-only version */
4374#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4375 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4376 { \
4377 /* register target */ \
4378 switch (pVCpu->iem.s.enmEffOpSize) \
4379 { \
4380 case IEMMODE_16BIT: \
4381 { \
4382 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4383 IEM_MC_BEGIN(3, 0, 0, 0); \
4384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4385 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4386 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4387 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4388 \
4389 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4390 IEM_MC_REF_EFLAGS(pEFlags); \
4391 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4392 \
4393 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4394 IEM_MC_END(); \
4395 break; \
4396 } \
4397 \
4398 case IEMMODE_32BIT: \
4399 { \
4400 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4401 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4403 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4404 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4405 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4406 \
4407 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4408 IEM_MC_REF_EFLAGS(pEFlags); \
4409 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4410 \
4411 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4412 IEM_MC_END(); \
4413 break; \
4414 } \
4415 \
4416 case IEMMODE_64BIT: \
4417 { \
4418 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4419 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4421 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4422 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4423 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4424 \
4425 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4426 IEM_MC_REF_EFLAGS(pEFlags); \
4427 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4428 \
4429 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4430 IEM_MC_END(); \
4431 break; \
4432 } \
4433 \
4434 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4435 } \
4436 } \
4437 else \
4438 { \
4439 /* memory target */ \
4440 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4441 { \
4442 switch (pVCpu->iem.s.enmEffOpSize) \
4443 { \
4444 case IEMMODE_16BIT: \
4445 { \
4446 IEM_MC_BEGIN(3, 3, 0, 0); \
4447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4448 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4449 \
4450 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4451 IEMOP_HLP_DONE_DECODING(); \
4452 \
4453 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4454 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
4455 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4456 \
4457 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4458 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4459 IEM_MC_FETCH_EFLAGS(EFlags); \
4460 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4461 \
4462 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
4463 IEM_MC_COMMIT_EFLAGS(EFlags); \
4464 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4465 IEM_MC_END(); \
4466 break; \
4467 } \
4468 \
4469 case IEMMODE_32BIT: \
4470 { \
4471 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4472 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4473 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4474 \
4475 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4476 IEMOP_HLP_DONE_DECODING(); \
4477 \
4478 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4479 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
4480 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4481 \
4482 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4483 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4484 IEM_MC_FETCH_EFLAGS(EFlags); \
4485 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4486 \
4487 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
4488 IEM_MC_COMMIT_EFLAGS(EFlags); \
4489 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4490 IEM_MC_END(); \
4491 break; \
4492 } \
4493 \
4494 case IEMMODE_64BIT: \
4495 { \
4496 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4499 \
4500 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4501 IEMOP_HLP_DONE_DECODING(); \
4502 \
4503 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4504 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
4505 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4506 \
4507 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4508 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4509 IEM_MC_FETCH_EFLAGS(EFlags); \
4510 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4511 \
4512 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
4513 IEM_MC_COMMIT_EFLAGS(EFlags); \
4514 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4515 IEM_MC_END(); \
4516 break; \
4517 } \
4518 \
4519 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4520 } \
4521 } \
4522 else \
4523 { \
4524 IEMOP_HLP_DONE_DECODING(); \
4525 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
4526 } \
4527 } \
4528 (void)0
4529
4530
4531/**
4532 * @opmaps grp1_81
4533 * @opcode /0
4534 * @opflclass arithmetic
4535 */
4536FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
4537{
4538 IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
4539 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
4540 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
4541}
4542
4543
4544/**
4545 * @opmaps grp1_81
4546 * @opcode /1
4547 * @opflclass logical
4548 */
4549FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
4550{
4551 IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
4552 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
4553 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
4554}
4555
4556
4557/**
4558 * @opmaps grp1_81
4559 * @opcode /2
4560 * @opflclass arithmetic_carry
4561 */
4562FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
4563{
4564 IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
4565 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
4566 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
4567}
4568
4569
4570/**
4571 * @opmaps grp1_81
4572 * @opcode /3
4573 * @opflclass arithmetic_carry
4574 */
4575FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
4576{
4577 IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
4578 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
4579 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
4580}
4581
4582
4583/**
4584 * @opmaps grp1_81
4585 * @opcode /4
4586 * @opflclass logical
4587 */
4588FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
4589{
4590 IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
4591 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
4592 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
4593}
4594
4595
4596/**
4597 * @opmaps grp1_81
4598 * @opcode /5
4599 * @opflclass arithmetic
4600 */
4601FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
4602{
4603 IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
4604 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
4605 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
4606}
4607
4608
4609/**
4610 * @opmaps grp1_81
4611 * @opcode /6
4612 * @opflclass logical
4613 */
4614FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
4615{
4616 IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
4617 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
4618 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
4619}
4620
4621
4622/**
4623 * @opmaps grp1_81
4624 * @opcode /7
4625 * @opflclass arithmetic
4626 */
4627FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
4628{
4629 IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
4630 IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
4631}
4632
4633
4634/**
4635 * @opcode 0x81
4636 */
4637FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
4638{
4639 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4640 switch (IEM_GET_MODRM_REG_8(bRm))
4641 {
4642 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
4643 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz, bRm);
4644 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
4645 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
4646 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
4647 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
4648 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
4649 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
4650 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4651 }
4652}
4653
4654
4655/**
4656 * @opcode 0x82
4657 * @opmnemonic grp1_82
4658 * @opgroup og_groups
4659 */
4660FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
4661{
4662 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
4663 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
4664}
4665
4666
4667/**
4668 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4669 * iemOp_Grp1_Ev_Ib.
4670 */
4671#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4672 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4673 { \
4674 /* \
4675 * Register target \
4676 */ \
4677 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4678 switch (pVCpu->iem.s.enmEffOpSize) \
4679 { \
4680 case IEMMODE_16BIT: \
4681 IEM_MC_BEGIN(3, 0, 0, 0); \
4682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4683 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4684 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
4685 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4686 \
4687 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4688 IEM_MC_REF_EFLAGS(pEFlags); \
4689 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4690 \
4691 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4692 IEM_MC_END(); \
4693 break; \
4694 \
4695 case IEMMODE_32BIT: \
4696 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4698 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4699 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
4700 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4701 \
4702 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4703 IEM_MC_REF_EFLAGS(pEFlags); \
4704 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4705 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4706 \
4707 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4708 IEM_MC_END(); \
4709 break; \
4710 \
4711 case IEMMODE_64BIT: \
4712 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4714 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4715 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
4716 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4717 \
4718 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4719 IEM_MC_REF_EFLAGS(pEFlags); \
4720 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4721 \
4722 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4723 IEM_MC_END(); \
4724 break; \
4725 \
4726 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4727 } \
4728 } \
4729 else \
4730 { \
4731 /* \
4732 * Memory target. \
4733 */ \
4734 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4735 { \
4736 switch (pVCpu->iem.s.enmEffOpSize) \
4737 { \
4738 case IEMMODE_16BIT: \
4739 IEM_MC_BEGIN(3, 3, 0, 0); \
4740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4742 \
4743 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4744 IEMOP_HLP_DONE_DECODING(); \
4745 \
4746 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4747 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4748 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4749 \
4750 IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
4751 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4752 IEM_MC_FETCH_EFLAGS(EFlags); \
4753 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4754 \
4755 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4756 IEM_MC_COMMIT_EFLAGS(EFlags); \
4757 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4758 IEM_MC_END(); \
4759 break; \
4760 \
4761 case IEMMODE_32BIT: \
4762 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4764 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4765 \
4766 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4767 IEMOP_HLP_DONE_DECODING(); \
4768 \
4769 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4770 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4771 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4772 \
4773 IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
4774 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4775 IEM_MC_FETCH_EFLAGS(EFlags); \
4776 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4777 \
4778 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4779 IEM_MC_COMMIT_EFLAGS(EFlags); \
4780 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4781 IEM_MC_END(); \
4782 break; \
4783 \
4784 case IEMMODE_64BIT: \
4785 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4786 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4788 \
4789 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4790 IEMOP_HLP_DONE_DECODING(); \
4791 \
4792 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4793 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4794 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4795 \
4796 IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
4797 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4798 IEM_MC_FETCH_EFLAGS(EFlags); \
4799 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4800 \
4801 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4802 IEM_MC_COMMIT_EFLAGS(EFlags); \
4803 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4804 IEM_MC_END(); \
4805 break; \
4806 \
4807 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4808 } \
4809 } \
4810 else \
4811 { \
4812 (void)0
4813/* Separate macro to work around parsing issue in IEMAllInstPython.py */
4814#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
4815 switch (pVCpu->iem.s.enmEffOpSize) \
4816 { \
4817 case IEMMODE_16BIT: \
4818 IEM_MC_BEGIN(3, 3, 0, 0); \
4819 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4820 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4821 \
4822 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4823 IEMOP_HLP_DONE_DECODING(); \
4824 \
4825 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4826 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4827 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4828 \
4829 IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
4830 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4831 IEM_MC_FETCH_EFLAGS(EFlags); \
4832 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
4833 \
4834 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4835 IEM_MC_COMMIT_EFLAGS(EFlags); \
4836 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4837 IEM_MC_END(); \
4838 break; \
4839 \
4840 case IEMMODE_32BIT: \
4841 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4842 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4843 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4844 \
4845 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4846 IEMOP_HLP_DONE_DECODING(); \
4847 \
4848 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4849 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4850 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4851 \
4852 IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
4853 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4854 IEM_MC_FETCH_EFLAGS(EFlags); \
4855 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
4856 \
4857 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4858 IEM_MC_COMMIT_EFLAGS(EFlags); \
4859 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4860 IEM_MC_END(); \
4861 break; \
4862 \
4863 case IEMMODE_64BIT: \
4864 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4865 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4866 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4867 \
4868 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4869 IEMOP_HLP_DONE_DECODING(); \
4870 \
4871 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4872 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4873 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4874 \
4875 IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
4876 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4877 IEM_MC_FETCH_EFLAGS(EFlags); \
4878 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
4879 \
4880 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4881 IEM_MC_COMMIT_EFLAGS(EFlags); \
4882 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4883 IEM_MC_END(); \
4884 break; \
4885 \
4886 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4887 } \
4888 } \
4889 } \
4890 (void)0
4891
4892/* read-only variant */
4893#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4894 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4895 { \
4896 /* \
4897 * Register target \
4898 */ \
4899 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4900 switch (pVCpu->iem.s.enmEffOpSize) \
4901 { \
4902 case IEMMODE_16BIT: \
4903 IEM_MC_BEGIN(3, 0, 0, 0); \
4904 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4905 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4906 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
4907 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4908 \
4909 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4910 IEM_MC_REF_EFLAGS(pEFlags); \
4911 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4912 \
4913 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4914 IEM_MC_END(); \
4915 break; \
4916 \
4917 case IEMMODE_32BIT: \
4918 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4920 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4921 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
4922 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4923 \
4924 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4925 IEM_MC_REF_EFLAGS(pEFlags); \
4926 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4927 \
4928 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4929 IEM_MC_END(); \
4930 break; \
4931 \
4932 case IEMMODE_64BIT: \
4933 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4935 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4936 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
4937 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4938 \
4939 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4940 IEM_MC_REF_EFLAGS(pEFlags); \
4941 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4942 \
4943 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4944 IEM_MC_END(); \
4945 break; \
4946 \
4947 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4948 } \
4949 } \
4950 else \
4951 { \
4952 /* \
4953 * Memory target. \
4954 */ \
4955 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4956 { \
4957 switch (pVCpu->iem.s.enmEffOpSize) \
4958 { \
4959 case IEMMODE_16BIT: \
4960 IEM_MC_BEGIN(3, 3, 0, 0); \
4961 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4962 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4963 \
4964 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4965 IEMOP_HLP_DONE_DECODING(); \
4966 \
4967 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4968 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
4969 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4970 \
4971 IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
4972 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4973 IEM_MC_FETCH_EFLAGS(EFlags); \
4974 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4975 \
4976 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
4977 IEM_MC_COMMIT_EFLAGS(EFlags); \
4978 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4979 IEM_MC_END(); \
4980 break; \
4981 \
4982 case IEMMODE_32BIT: \
4983 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4984 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4985 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4986 \
4987 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4988 IEMOP_HLP_DONE_DECODING(); \
4989 \
4990 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4991 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
4992 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4993 \
4994 IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
4995 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4996 IEM_MC_FETCH_EFLAGS(EFlags); \
4997 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4998 \
4999 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
5000 IEM_MC_COMMIT_EFLAGS(EFlags); \
5001 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5002 IEM_MC_END(); \
5003 break; \
5004 \
5005 case IEMMODE_64BIT: \
5006 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
5007 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5008 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5009 \
5010 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5011 IEMOP_HLP_DONE_DECODING(); \
5012 \
5013 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5014 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
5015 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5016 \
5017 IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
5018 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
5019 IEM_MC_FETCH_EFLAGS(EFlags); \
5020 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
5021 \
5022 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
5023 IEM_MC_COMMIT_EFLAGS(EFlags); \
5024 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5025 IEM_MC_END(); \
5026 break; \
5027 \
5028 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5029 } \
5030 } \
5031 else \
5032 { \
5033 IEMOP_HLP_DONE_DECODING(); \
5034 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
5035 } \
5036 } \
5037 (void)0
5038
/**
 * @opmaps grp1_83
 * @opcode /0
 * @opflclass arithmetic
 *
 * ADD Ev,Ib - destination is read-modify-write, so both the normal RW body
 * and the LOCK-prefixed memory body are emitted (body macros defined above).
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
5050
5051
/**
 * @opmaps grp1_83
 * @opcode /1
 * @opflclass logical
 *
 * OR Ev,Ib - read-modify-write, normal and locked worker variants.
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
5063
5064
/**
 * @opmaps grp1_83
 * @opcode /2
 * @opflclass arithmetic_carry
 *
 * ADC Ev,Ib - read-modify-write (carry consumed via EFLAGS argument in the
 * body macros), normal and locked worker variants.
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
5076
5077
/**
 * @opmaps grp1_83
 * @opcode /3
 * @opflclass arithmetic_carry
 *
 * SBB Ev,Ib - read-modify-write, normal and locked worker variants.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
5089
5090
/**
 * @opmaps grp1_83
 * @opcode /4
 * @opflclass logical
 *
 * AND Ev,Ib - read-modify-write, normal and locked worker variants.
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
5102
5103
/**
 * @opmaps grp1_83
 * @opcode /5
 * @opflclass arithmetic
 *
 * SUB Ev,Ib - read-modify-write, normal and locked worker variants.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
5115
5116
/**
 * @opmaps grp1_83
 * @opcode /6
 * @opflclass logical
 *
 * XOR Ev,Ib - read-modify-write, normal and locked worker variants.
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
5128
5129
/**
 * @opmaps grp1_83
 * @opcode /7
 * @opflclass arithmetic
 *
 * CMP Ev,Ib - only reads the destination and updates EFLAGS, so the
 * read-only body is used and there is no locked variant (LOCK raises an
 * invalid-lock-prefix error inside the RO body).
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
5140
5141
/**
 * @opcode 0x83
 *
 * Group 1 Ev,Ib dispatcher: the reg field of the ModR/M byte selects which
 * of the eight immediate-byte instructions is decoded.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
             to the 386 even if absent in the intel reference manuals and some
             3rd party opcode listings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* reg field is always 0..7 */
    }
}
5164
5165
/**
 * @opcode 0x84
 * @opflclass logical
 *
 * TEST Eb,Gb - read-only byte form; AF is left undefined, which the
 * verification macro below tells the checker to ignore.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8);
}
5176
5177
/**
 * @opcode 0x85
 * @opflclass logical
 *
 * TEST Ev,Gv - read-only word/dword/qword form; AF is left undefined, which
 * the verification macro below tells the checker to ignore.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64);
}
5188
5189
/**
 * @opcode 0x86
 *
 * XCHG Eb,Gb - swaps a byte register with another byte register or memory.
 * No flags are touched.
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register/register: fetch both, store both crosswise. */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.  The atomic worker is used by default
         * (XCHG with memory locks the bus regardless of any LOCK prefix);
         * the unlocked worker is only chosen when the execution environment
         * says to disregard LOCK semantics.
         */
#define IEMOP_XCHG_BYTE(a_fnWorker, a_Style) \
            IEM_MC_BEGIN(2, 4, 0, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            IEM_MC_LOCAL(uint8_t, uTmpReg); \
            IEM_MC_ARG(uint8_t *, pu8Mem, 0); \
            IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
            IEM_MC_MEM_MAP_U8_##a_Style(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker, pu8Mem, pu8Reg); \
            IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Style(bUnmapInfo); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END()

        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_locked,ATOMIC);
        }
        else
        {
            IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_unlocked,RW);
        }
    }
}
5250
5251
/**
 * @opcode 0x87
 *
 * XCHG Ev,Gv - swaps a word/dword/qword register with another register or
 * memory.  No flags are touched.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register/register: fetch both, store both crosswise. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.  As with the byte form, the atomic worker
         * is used unless the environment disregards LOCK semantics.
         */
#define IEMOP_XCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
        do { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 4, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_LOCAL(uint16_t, uTmpReg); \
                    IEM_MC_ARG(uint16_t *, pu16Mem, 0); \
                    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
                    IEM_MC_MEM_MAP_U16_##a_Type(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker16, pu16Mem, pu16Reg); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
                    \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 4, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_LOCAL(uint32_t, uTmpReg); \
                    IEM_MC_ARG(uint32_t *, pu32Mem, 0); \
                    IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_##a_Type(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker32, pu32Mem, pu32Reg); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
                    \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 4, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_LOCAL(uint64_t, uTmpReg); \
                    IEM_MC_ARG(uint64_t *, pu64Mem, 0); \
                    IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_##a_Type(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker64, pu64Mem, pu64Reg); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
                    \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } while (0)
        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_locked, iemAImpl_xchg_u32_locked, iemAImpl_xchg_u64_locked,ATOMIC);
        }
        else
        {
            IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_unlocked, iemAImpl_xchg_u32_unlocked, iemAImpl_xchg_u64_unlocked,RW);
        }
    }
}
5397
5398
/**
 * @opcode 0x88
 *
 * MOV Eb,Gb - store a byte register to a register or memory operand.
 * No flags are touched.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5438
5439
/**
 * @opcode 0x89
 *
 * MOV Ev,Gv - store a word/dword/qword register to a register or memory
 * operand, switching on the effective operand size.  No flags are touched.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5536
5537
/**
 * @opcode 0x8a
 *
 * MOV Gb,Eb - load a byte register from a register or memory operand.
 * No flags are touched.
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5576
5577
/**
 * @opcode 0x8b
 *
 * MOV Gv,Ev - load a word/dword/qword register from a register or memory
 * operand, switching on the effective operand size.  No flags are touched.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5674
5675
5676/**
5677 * opcode 0x63
5678 * @todo Table fixme
5679 */
5680FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
5681{
5682 if (!IEM_IS_64BIT_CODE(pVCpu))
5683 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
5684 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
5685 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
5686 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
5687}
5688
5689
/**
 * @opcode 0x8c
 *
 * MOV Ev,Sw - store a segment register.  The register form respects the
 * operand size (upper bits zeroed via the ZX fetches); the memory form is
 * always a 16-bit store.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if (iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5768
5769
5770
5771
5772/**
5773 * @opcode 0x8d
5774 */
5775FNIEMOP_DEF(iemOp_lea_Gv_M)
5776{
5777 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
5778 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5779 if (IEM_IS_MODRM_REG_MODE(bRm))
5780 IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */
5781
5782 switch (pVCpu->iem.s.enmEffOpSize)
5783 {
5784 case IEMMODE_16BIT:
5785 IEM_MC_BEGIN(0, 2, 0, 0);
5786 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5789 /** @todo optimize: This value casting/masking can be skipped if addr-size ==
5790 * operand-size, which is usually the case. It'll save an instruction
5791 * and a register. */
5792 IEM_MC_LOCAL(uint16_t, u16Cast);
5793 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
5794 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
5795 IEM_MC_ADVANCE_RIP_AND_FINISH();
5796 IEM_MC_END();
5797 break;
5798
5799 case IEMMODE_32BIT:
5800 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5801 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5802 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5804 /** @todo optimize: This value casting/masking can be skipped if addr-size ==
5805 * operand-size, which is usually the case. It'll save an instruction
5806 * and a register. */
5807 IEM_MC_LOCAL(uint32_t, u32Cast);
5808 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
5809 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
5810 IEM_MC_ADVANCE_RIP_AND_FINISH();
5811 IEM_MC_END();
5812 break;
5813
5814 case IEMMODE_64BIT:
5815 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5816 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5817 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5819 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
5820 IEM_MC_ADVANCE_RIP_AND_FINISH();
5821 IEM_MC_END();
5822 break;
5823
5824 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5825 }
5826}
5827
5828
/**
 * @opcode 0x8e
 *
 * MOV Sw,Ev - load a segment register from a 16-bit register or memory
 * operand via the iemCImpl_load_SReg C implementation.  CS is not a valid
 * destination; SS loads set the interrupt-inhibit shadow flag.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    /** @todo r=bird: What does 8086 do here wrt CS? */
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     *
     * Note! Using IEMOP_MOV_SW_EV_REG_BODY here to specify different
     *       IEM_CIMPL_F_XXX values depending on the CPU mode and target
     *       register. This is a restriction of the current recompiler
     *       approach.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The CIMPL call is told which guest segment registers it may
           modify (selector, base, limit and attributes of iSegReg). */
#define IEMOP_MOV_SW_EV_REG_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 0, 0, a_fCImplFlags); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t, u16Value, 1); \
            IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                  RT_BIT_64(kIemNativeGstReg_SegSelFirst + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
                                iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        /* SS loads inhibit interrupts for one instruction; loads that can
           affect the addressing mode in 32-bit code add IEM_CIMPL_F_MODE. */
        if (iSegReg == X86_SREG_SS)
        {
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_REG_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_REG_BODY
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#define IEMOP_MOV_SW_EV_MEM_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 1, 0, a_fCImplFlags); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t, u16Value, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                  RT_BIT_64(kIemNativeGstReg_SegSelFirst + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
                                iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        /* Same CIMPL flag selection as for the register form above. */
        if (iSegReg == X86_SREG_SS)
        {
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_MEM_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_MEM_BODY
    }
}
5944
5945
/** Opcode 0x8f /0. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations. Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler. It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev? Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations. This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice. This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            /* The high byte of the third argument ('2 << 8') presumably biases
               rSP by the operand size while calculating the effective address,
               implementing the pre-increment rule described above - the #else
               branch below passes the same value to iemOpHlpCalcRmEffAddr. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            /* The CImpl worker pops to memory and commits the new rSP, hence
               the xSP clobber annotation for the liveness analysis. */
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    /* Pop via a temporary copy of rSP so nothing is committed on failure. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Everything worked: commit the new rSP and advance RIP. */
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
6077
6078
/**
 * @opcode 0x8f
 */
FNIEMOP_DEF(iemOp_Grp1A__xop)
{
    /*
     * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
     * three byte VEX prefix, except that the mmmmm field cannot have the values
     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    IEMOP_MNEMONIC(xop, "xop");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
    {
        /** @todo Test when exctly the XOP conformance checks kick in during
         * instruction decoding and fetching (using \#PF). */
        uint8_t bXop2;   IEM_OPCODE_GET_NEXT_U8(&bXop2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* Like VEX, XOP is invalid when combined with 66/F2/F3/LOCK/REX. */
        if (   (  pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
            if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* R, X and B are stored inverted in the prefix bytes, hence the '~'. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bXop2 & 0x3;

            /** @todo XOP: Just use new tables and decoders. */
            switch (bRm & 0x1f) /* the mmmmm field selects the opcode map */
            {
                case 8: /* xop opcode map 8. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 9: /* xop opcode map 9. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 10: /* xop opcode map 10. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        else
            Log(("XOP: Invalid prefix mix!\n"));
    }
    else
        Log(("XOP: XOP support disabled!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
6141
6142
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Shared by opcodes 0x90 (with REX.B) thru 0x97. Swaps the given general
 * register (after REX.B extension) with rAX at the current effective operand
 * size. Pure register operation - no LOCK, no memory, no flags touched.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    iReg |= pVCpu->iem.s.uRexB; /* extend to r8..r15 when REX.B is set */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            /* 32-bit stores zero the upper halves of both destinations. */
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6193
6194
/**
 * @opcode 0x90
 *
 * Plain NOP, but also 'xchg r8,rAX' when REX.B is present and 'pause' with
 * the F3 prefix (which sets IEM_OP_PRF_LOCK's sibling REPZ bit; see below).
 */
FNIEMOP_DEF(iemOp_nop)
{
    /* R8/R8D and RAX/EAX can be exchanged. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
    {
        IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
        return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        IEMOP_MNEMONIC(pause, "pause");
        /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
           and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
        if (!IEM_IS_IN_GUEST(pVCpu))
        { /* probable */ }
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
        /* Nested VMX/SVM may intercept PAUSE - defer to the CImpl workers
           which perform the VM-exit checks. */
        else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmx_pause);
#endif
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
        else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_svm_pause);
#endif
    }
    else
        IEMOP_MNEMONIC(nop, "nop");
    /** @todo testcase: lock nop; lock pause */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6231
6232
/**
 * @opcode 0x91
 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    /* Pure register swap; all the work happens in the common helper. */
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
6241
6242
/**
 * @opcode 0x92
 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    /* Pure register swap; all the work happens in the common helper. */
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
6251
6252
/**
 * @opcode 0x93
 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    /* Pure register swap; all the work happens in the common helper. */
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
6261
6262
6263/**
6264 * @opcode 0x94
6265 */
6266FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6267{
6268 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6269 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6270}
6271
6272
/**
 * @opcode 0x95
 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    /* Pure register swap; all the work happens in the common helper. */
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
6281
6282
/**
 * @opcode 0x96
 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    /* Pure register swap; all the work happens in the common helper. */
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
6291
6292
/**
 * @opcode 0x97
 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    /* Pure register swap; all the work happens in the common helper. */
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
6301
6302
/**
 * @opcode 0x98
 *
 * Sign-extends AL->AX (cbw), AX->EAX (cwde) or EAX->RAX (cdqe) depending on
 * the effective operand size. Implemented by testing the sign bit of the
 * source width and then OR-ing in ones / AND-ing in zeros for the upper part.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) { /* AL sign bit */
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) { /* AX sign bit */
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) { /* EAX sign bit */
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6352
6353
/**
 * @opcode 0x99
 *
 * Sign-extends rAX into rDX:rAX - cwd (DX:AX), cdq (EDX:EAX) or cqo
 * (RDX:RAX) depending on the effective operand size. rDX becomes all ones
 * or all zeros based on the sign bit of rAX; rAX itself is untouched.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) { /* AX sign bit */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) { /* EAX sign bit */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) { /* RAX sign bit */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6403
6404
/**
 * @opcode 0x9a
 *
 * Direct far call with an immediate seg:off pointer. Invalid in 64-bit mode
 * (IEMOP_HLP_NO_64BIT). The offset immediate is 16 or 32 bits depending on
 * the operand size; the heavy lifting (stack push, mode/task switching) is
 * deferred to iemCImpl_callf.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
                                iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
6426
6427
/** Opcode 0x9b. (aka fwait)
 * Checks for pending x87 exceptions (and CR0.MP/TS conditions) but performs
 * no other work. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE(); /* #NM per CR0.MP/TS */
    IEM_MC_MAYBE_RAISE_FPU_XCPT();                  /* pending #MF */
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6439
6440
/**
 * @opcode 0x9c
 *
 * Deferred entirely to iemCImpl_pushf: EFLAGS pushing is mode- and
 * privilege-sensitive (V86/IOPL handling, possible VM-exit).
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
                                iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
6452
6453
/**
 * @opcode 0x9d
 *
 * Deferred entirely to iemCImpl_popf. Modifies EFLAGS (IEM_CIMPL_F_RFLAGS)
 * and must re-check for pending interrupts since IF may change
 * (IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER).
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_MNEMONIC(popf_Fv, "popf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
                                iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
6466
6467
/**
 * @opcode 0x9e
 *
 * Loads SF/ZF/AF/PF/CF from AH. In 64-bit mode this is only valid when the
 * CPU reports the LAHF/SAHF feature; otherwise it raises \#UD.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Keep only the status bits SAHF may set, merge into the low byte of
       EFLAGS, and force the always-one reserved bit (X86_EFL_1). */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6491
6492
/**
 * @opcode 0x9f
 *
 * Stores the low EFLAGS byte into AH. Like SAHF, only valid in 64-bit mode
 * when the LAHF/SAHF feature is reported; otherwise \#UD.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags); /* xSP encodes AH w/o REX */
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6510
6511
/**
 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
 * Will return/throw on failures.
 *
 * The immediate offset width follows the effective address size (16/32/64
 * bits) and is zero-extended to 64 bits in all cases.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } while (0)
6535
/**
 * @opcode 0xa0
 *
 * mov AL, moffs8 - loads AL from an absolute (segment-relative) offset
 * encoded directly in the instruction stream.
 */
FNIEMOP_DEF(iemOp_mov_AL_Ob)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6560
6561
/**
 * @opcode 0xa1
 *
 * mov rAX, moffsXX - loads AX/EAX/RAX from an absolute (segment-relative)
 * offset; operand size selects the access width.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6615
6616
/**
 * @opcode 0xa2
 *
 * mov moffs8, AL - stores AL to an absolute (segment-relative) offset.
 */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
    IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6641
6642
/**
 * @opcode 0xa3
 *
 * mov moffsXX, rAX - stores AX/EAX/RAX to an absolute (segment-relative)
 * offset; operand size selects the access width.
 */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Store rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6696
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Emits one non-REP MOVS body for the given value/address widths: load from
 * DS(or prefix):rSI, store to ES:rDI, then step both index registers by the
 * element size - down when EFLAGS.DF is set, up otherwise. */
#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6716
/**
 * @opcode 0xa4
 * @opfltest df
 *
 * movsb - byte string move. With a REP prefix the whole loop is handed to a
 * CImpl worker (one per address size); otherwise a single iteration is
 * emitted via IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* The REP workers read/write rSI, rDI and rCX - hence the liveness
           annotations on each deferred call. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6766
6767
/**
 * @opcode 0xa5
 * @opfltest df
 *
 * movsw/movsd/movsq - word/dword/qword string move. With a REP prefix the
 * loop is deferred to one of nine CImpl workers selected by operand x
 * address size; otherwise one iteration is emitted via IEM_MOVS_CASE.
 *
 * Note: each DEFER_TO_CIMPL/IEM_MOVS_CASE path returns or breaks, so the
 * apparent fall-throughs between outer switch cases are unreachable.
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* 64-bit op w/ 16-bit addr: not encodable */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6892
6893#undef IEM_MOVS_CASE
6894
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 * Emits one non-REP CMPS body for the given value/address widths: loads from
 * DS(or prefix):rSI and ES:rDI, compares them via the cmp AIMPL worker
 * (which updates EFLAGS), then steps rSI/rDI by the element size in the
 * direction dictated by EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(3, 3, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        \
        IEM_MC_LOCAL(RTGCPTR, uAddr1); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr1, X86_GREG_xSI); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr1); \
        \
        IEM_MC_LOCAL(RTGCPTR, uAddr2); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr2, X86_GREG_xDI); \
        IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr2); \
        \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_ARG_LOCAL_REF(uint##ValBits##_t *, puValue1, uValue1, 0); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6924
/**
 * @opcode 0xa6
 * @opflclass arithmetic
 * @opfltest df
 *
 * CMPSB - compare byte at DS:[xSI] (seg overridable) with byte at ES:[xDI].
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * Each IEM_MC_DEFER_TO_CIMPL_1_RET returns, so the switch cases need
     * no break statements; xSI/xDI/xCX are flagged as modified for the
     * native recompiler's liveness analysis.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7003
7004
7005/**
7006 * @opcode 0xa7
7007 * @opflclass arithmetic
7008 * @opfltest df
7009 */
7010FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
7011{
7012 /*
7013 * Use the C implementation if a repeat prefix is encountered.
7014 */
7015 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7016 {
7017 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
7018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7019 switch (pVCpu->iem.s.enmEffOpSize)
7020 {
7021 case IEMMODE_16BIT:
7022 switch (pVCpu->iem.s.enmEffAddrMode)
7023 {
7024 case IEMMODE_16BIT:
7025 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7026 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7027 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7028 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7029 iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7030 case IEMMODE_32BIT:
7031 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7032 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7033 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7034 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7035 iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7036 case IEMMODE_64BIT:
7037 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7038 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7039 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7040 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7041 iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7042 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7043 }
7044 break;
7045 case IEMMODE_32BIT:
7046 switch (pVCpu->iem.s.enmEffAddrMode)
7047 {
7048 case IEMMODE_16BIT:
7049 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7050 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7051 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7052 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7053 iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7054 case IEMMODE_32BIT:
7055 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7056 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7057 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7058 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7059 iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7060 case IEMMODE_64BIT:
7061 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7062 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7063 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7064 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7065 iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7066 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7067 }
7068 case IEMMODE_64BIT:
7069 switch (pVCpu->iem.s.enmEffAddrMode)
7070 {
7071 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
7072 case IEMMODE_32BIT:
7073 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7074 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7075 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7076 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7077 iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7078 case IEMMODE_64BIT:
7079 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7080 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7081 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7082 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7083 iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7084 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7085 }
7086 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7087 }
7088 }
7089
7090 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7091 {
7092 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
7093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7094 switch (pVCpu->iem.s.enmEffOpSize)
7095 {
7096 case IEMMODE_16BIT:
7097 switch (pVCpu->iem.s.enmEffAddrMode)
7098 {
7099 case IEMMODE_16BIT:
7100 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7101 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7102 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7103 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7104 iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7105 case IEMMODE_32BIT:
7106 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7107 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7108 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7109 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7110 iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7111 case IEMMODE_64BIT:
7112 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7113 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7114 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7115 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7116 iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7117 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7118 }
7119 break;
7120 case IEMMODE_32BIT:
7121 switch (pVCpu->iem.s.enmEffAddrMode)
7122 {
7123 case IEMMODE_16BIT:
7124 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7125 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7126 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7127 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7128 iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7129 case IEMMODE_32BIT:
7130 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7131 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7132 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7133 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7134 iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7135 case IEMMODE_64BIT:
7136 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7137 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7138 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7139 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7140 iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7141 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7142 }
7143 case IEMMODE_64BIT:
7144 switch (pVCpu->iem.s.enmEffAddrMode)
7145 {
7146 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
7147 case IEMMODE_32BIT:
7148 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7149 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7150 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7151 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7152 iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7153 case IEMMODE_64BIT:
7154 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7155 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7156 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7157 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7158 iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7159 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7160 }
7161 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7162 }
7163 }
7164
7165 /*
7166 * Annoying double switch here.
7167 * Using ugly macro for implementing the cases, sharing it with cmpsb.
7168 */
7169 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
7170 switch (pVCpu->iem.s.enmEffOpSize)
7171 {
7172 case IEMMODE_16BIT:
7173 switch (pVCpu->iem.s.enmEffAddrMode)
7174 {
7175 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7176 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7177 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
7178 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7179 }
7180 break;
7181
7182 case IEMMODE_32BIT:
7183 switch (pVCpu->iem.s.enmEffAddrMode)
7184 {
7185 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7186 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7187 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
7188 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7189 }
7190 break;
7191
7192 case IEMMODE_64BIT:
7193 switch (pVCpu->iem.s.enmEffAddrMode)
7194 {
7195 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7196 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break;
7197 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
7198 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7199 }
7200 break;
7201 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7202 }
7203}
7204
7205#undef IEM_CMPS_CASE
7206
/**
 * @opcode 0xa8
 * @opflclass logical
 *
 * TEST AL,Ib - AND AL with an immediate byte, updating only EFLAGS.
 * AF is undefined per the logical-op flag class, hence the verification hint.
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
}
7217
7218
/**
 * @opcode 0xa9
 * @opflclass logical
 *
 * TEST rAX,Iz - AND AX/EAX/RAX with an immediate, updating only EFLAGS.
 * AF is undefined per the logical-op flag class, hence the verification hint.
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
}
7229
7230
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the threaded-code body for one (non-REP) STOS iteration: stores
 * AL/AX/EAX/RAX to ES:[xDI] and then advances or rewinds xDI by the operand
 * size according to EFLAGS.DF.
 *
 * @param   ValBits     Operand width in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address width in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags passed to IEM_MC_BEGIN.
 */
#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7247
/**
 * @opcode 0xaa
 *
 * STOSB - store AL at ES:[xDI].
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (REP and REPNE behave identically for STOS.)  Each deferral returns;
     * xDI/xCX are flagged as modified for the recompiler's liveness analysis.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m16);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m32);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7293
7294
7295/**
7296 * @opcode 0xab
7297 */
7298FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
7299{
7300 /*
7301 * Use the C implementation if a repeat prefix is encountered.
7302 */
7303 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7304 {
7305 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
7306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7307 switch (pVCpu->iem.s.enmEffOpSize)
7308 {
7309 case IEMMODE_16BIT:
7310 switch (pVCpu->iem.s.enmEffAddrMode)
7311 {
7312 case IEMMODE_16BIT:
7313 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7314 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7315 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7316 iemCImpl_stos_ax_m16);
7317 case IEMMODE_32BIT:
7318 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7319 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7320 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7321 iemCImpl_stos_ax_m32);
7322 case IEMMODE_64BIT:
7323 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7324 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7325 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7326 iemCImpl_stos_ax_m64);
7327 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7328 }
7329 break;
7330 case IEMMODE_32BIT:
7331 switch (pVCpu->iem.s.enmEffAddrMode)
7332 {
7333 case IEMMODE_16BIT:
7334 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7335 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7336 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7337 iemCImpl_stos_eax_m16);
7338 case IEMMODE_32BIT:
7339 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7340 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7341 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7342 iemCImpl_stos_eax_m32);
7343 case IEMMODE_64BIT:
7344 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7345 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7346 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7347 iemCImpl_stos_eax_m64);
7348 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7349 }
7350 case IEMMODE_64BIT:
7351 switch (pVCpu->iem.s.enmEffAddrMode)
7352 {
7353 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7354 case IEMMODE_32BIT:
7355 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7356 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7357 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7358 iemCImpl_stos_rax_m32);
7359 case IEMMODE_64BIT:
7360 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7361 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7362 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7363 iemCImpl_stos_rax_m64);
7364 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7365 }
7366 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7367 }
7368 }
7369
7370 /*
7371 * Annoying double switch here.
7372 * Using ugly macro for implementing the cases, sharing it with stosb.
7373 */
7374 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7375 switch (pVCpu->iem.s.enmEffOpSize)
7376 {
7377 case IEMMODE_16BIT:
7378 switch (pVCpu->iem.s.enmEffAddrMode)
7379 {
7380 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7381 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7382 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
7383 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7384 }
7385 break;
7386
7387 case IEMMODE_32BIT:
7388 switch (pVCpu->iem.s.enmEffAddrMode)
7389 {
7390 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7391 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7392 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
7393 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7394 }
7395 break;
7396
7397 case IEMMODE_64BIT:
7398 switch (pVCpu->iem.s.enmEffAddrMode)
7399 {
7400 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7401 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
7402 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
7403 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7404 }
7405 break;
7406 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7407 }
7408}
7409
7410#undef IEM_STOS_CASE
7411
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the threaded-code body for one (non-REP) LODS iteration: loads
 * AL/AX/EAX/RAX from xSI in the effective segment and then advances or
 * rewinds xSI by the operand size according to EFLAGS.DF.
 *
 * @param   ValBits     Operand width in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address width in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags passed to IEM_MC_BEGIN.
 */
#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7428
/**
 * @opcode 0xac
 * @opfltest df
 *
 * LODSB - load AL from DS:[xSI] (segment overridable).
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (REP and REPNE behave identically for LODS.)  Each deferral returns;
     * xAX/xSI/xCX are flagged as modified for the recompiler's liveness
     * analysis.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7478
7479
/**
 * @opcode 0xad
 * @opfltest df
 *
 * LODSW/LODSD/LODSQ - load AX/EAX/RAX from DS:[xSI] (segment overridable).
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (REP and REPNE behave identically for LODS.)  Each deferral returns,
     * so the inner switch cases need no break statements; xAX/xSI/xCX are
     * flagged as modified for the recompiler's liveness analysis.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7); /* cannot be encoded */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7603
7604#undef IEM_LODS_CASE
7605
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the threaded-code body for one (non-REP) SCAS iteration: compares
 * AL/AX/EAX/RAX against the value at ES:[xDI] via the iemAImpl_cmp_uNN
 * helper (EFLAGS only, rAX unmodified by the cmp) and then advances or
 * rewinds xDI by the operand size according to EFLAGS.DF.
 *
 * @param   ValBits     Operand width in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address width in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags passed to IEM_MC_BEGIN.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
7628
7629/**
7630 * @opcode 0xae
7631 * @opflclass arithmetic
7632 * @opfltest df
7633 */
7634FNIEMOP_DEF(iemOp_scasb_AL_Xb)
7635{
7636 /*
7637 * Use the C implementation if a repeat prefix is encountered.
7638 */
7639 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7640 {
7641 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
7642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7643 switch (pVCpu->iem.s.enmEffAddrMode)
7644 {
7645 case IEMMODE_16BIT:
7646 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7647 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7648 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7649 iemCImpl_repe_scas_al_m16);
7650 case IEMMODE_32BIT:
7651 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7652 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7653 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7654 iemCImpl_repe_scas_al_m32);
7655 case IEMMODE_64BIT:
7656 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7657 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7658 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7659 iemCImpl_repe_scas_al_m64);
7660 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7661 }
7662 }
7663 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7664 {
7665 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
7666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7667 switch (pVCpu->iem.s.enmEffAddrMode)
7668 {
7669 case IEMMODE_16BIT:
7670 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7671 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7672 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7673 iemCImpl_repne_scas_al_m16);
7674 case IEMMODE_32BIT:
7675 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7676 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7677 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7678 iemCImpl_repne_scas_al_m32);
7679 case IEMMODE_64BIT:
7680 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7681 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7682 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7683 iemCImpl_repne_scas_al_m64);
7684 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7685 }
7686 }
7687
7688 /*
7689 * Sharing case implementation with stos[wdq] below.
7690 */
7691 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
7692 switch (pVCpu->iem.s.enmEffAddrMode)
7693 {
7694 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7695 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7696 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
7697 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7698 }
7699}
7700
7701
7702/**
7703 * @opcode 0xaf
7704 * @opflclass arithmetic
7705 * @opfltest df
7706 */
7707FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
7708{
7709 /*
7710 * Use the C implementation if a repeat prefix is encountered.
7711 */
7712 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7713 {
7714 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
7715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7716 switch (pVCpu->iem.s.enmEffOpSize)
7717 {
7718 case IEMMODE_16BIT:
7719 switch (pVCpu->iem.s.enmEffAddrMode)
7720 {
7721 case IEMMODE_16BIT:
7722 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7723 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7724 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7725 iemCImpl_repe_scas_ax_m16);
7726 case IEMMODE_32BIT:
7727 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7728 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7729 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7730 iemCImpl_repe_scas_ax_m32);
7731 case IEMMODE_64BIT:
7732 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7733 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7734 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7735 iemCImpl_repe_scas_ax_m64);
7736 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7737 }
7738 break;
7739 case IEMMODE_32BIT:
7740 switch (pVCpu->iem.s.enmEffAddrMode)
7741 {
7742 case IEMMODE_16BIT:
7743 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7744 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7745 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7746 iemCImpl_repe_scas_eax_m16);
7747 case IEMMODE_32BIT:
7748 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7749 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7750 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7751 iemCImpl_repe_scas_eax_m32);
7752 case IEMMODE_64BIT:
7753 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7754 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7755 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7756 iemCImpl_repe_scas_eax_m64);
7757 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7758 }
7759 case IEMMODE_64BIT:
7760 switch (pVCpu->iem.s.enmEffAddrMode)
7761 {
7762 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
7763 case IEMMODE_32BIT:
7764 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7765 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7766 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7767 iemCImpl_repe_scas_rax_m32);
7768 case IEMMODE_64BIT:
7769 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7770 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7771 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7772 iemCImpl_repe_scas_rax_m64);
7773 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7774 }
7775 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7776 }
7777 }
7778 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7779 {
7780 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
7781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7782 switch (pVCpu->iem.s.enmEffOpSize)
7783 {
7784 case IEMMODE_16BIT:
7785 switch (pVCpu->iem.s.enmEffAddrMode)
7786 {
7787 case IEMMODE_16BIT:
7788 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7789 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7790 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7791 iemCImpl_repne_scas_ax_m16);
7792 case IEMMODE_32BIT:
7793 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7794 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7795 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7796 iemCImpl_repne_scas_ax_m32);
7797 case IEMMODE_64BIT:
7798 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7799 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7800 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7801 iemCImpl_repne_scas_ax_m64);
7802 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7803 }
7804 break;
7805 case IEMMODE_32BIT:
7806 switch (pVCpu->iem.s.enmEffAddrMode)
7807 {
7808 case IEMMODE_16BIT:
7809 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7810 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7811 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7812 iemCImpl_repne_scas_eax_m16);
7813 case IEMMODE_32BIT:
7814 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7815 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7816 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7817 iemCImpl_repne_scas_eax_m32);
7818 case IEMMODE_64BIT:
7819 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7820 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7821 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7822 iemCImpl_repne_scas_eax_m64);
7823 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7824 }
7825 case IEMMODE_64BIT:
7826 switch (pVCpu->iem.s.enmEffAddrMode)
7827 {
7828 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
7829 case IEMMODE_32BIT:
7830 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7831 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7832 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7833 iemCImpl_repne_scas_rax_m32);
7834 case IEMMODE_64BIT:
7835 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7836 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7837 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7838 iemCImpl_repne_scas_rax_m64);
7839 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7840 }
7841 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7842 }
7843 }
7844
7845 /*
7846 * Annoying double switch here.
7847 * Using ugly macro for implementing the cases, sharing it with scasb.
7848 */
7849 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
7850 switch (pVCpu->iem.s.enmEffOpSize)
7851 {
7852 case IEMMODE_16BIT:
7853 switch (pVCpu->iem.s.enmEffAddrMode)
7854 {
7855 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7856 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7857 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
7858 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7859 }
7860 break;
7861
7862 case IEMMODE_32BIT:
7863 switch (pVCpu->iem.s.enmEffAddrMode)
7864 {
7865 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7866 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7867 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
7868 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7869 }
7870 break;
7871
7872 case IEMMODE_64BIT:
7873 switch (pVCpu->iem.s.enmEffAddrMode)
7874 {
7875 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7876 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
7877 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
7878 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7879 }
7880 break;
7881 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7882 }
7883}
7884
7885#undef IEM_SCAS_CASE
7886
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the immediate byte and stores it into the given 8-bit register.
 *
 * @param   iFixedReg   The fully decoded destination register index, i.e.
 *                      with any REX.B extension already applied by the caller.
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_STORE_GREG_U8_CONST(iFixedReg, u8Imm);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7899
7900
/**
 * @opcode 0xb0
 * mov AL,Ib - with REX.B the destination becomes R8B.
 */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
7909
7910
/**
 * @opcode 0xb1
 * mov CL,Ib - with REX.B the destination becomes R9B.
 */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
7919
7920
/**
 * @opcode 0xb2
 * mov DL,Ib - with REX.B the destination becomes R10B.
 */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
7929
7930
/**
 * @opcode 0xb3
 * mov BL,Ib - with REX.B the destination becomes R11B.
 */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
7939
7940
/**
 * @opcode 0xb4
 * mov AH,Ib - AH shares register encoding 4 with xSP, hence the
 * X86_GREG_xSP below (with a REX prefix this encodes SPL/R12B instead,
 * which the byte-register accessors resolve).
 */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
7949
7950
/**
 * @opcode 0xb5
 * mov CH,Ib - CH shares register encoding 5 with xBP, hence the
 * X86_GREG_xBP below (with a REX prefix this encodes BPL/R13B instead).
 */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
7959
7960
/**
 * @opcode 0xb6
 * mov DH,Ib - DH shares register encoding 6 with xSI, hence the
 * X86_GREG_xSI below (with a REX prefix this encodes SIL/R14B instead).
 */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
7969
7970
/**
 * @opcode 0xb7
 * mov BH,Ib - BH shares register encoding 7 with xDI, hence the
 * X86_GREG_xDI below (with a REX prefix this encodes DIL/R15B instead).
 */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
7979
7980
/**
 * Common 'mov regX,immX' helper.
 *
 * Fetches an immediate matching the effective operand size (16, 32 or a
 * full 64 bits) and stores it into the given general register.
 *
 * @param   iFixedReg   The fully decoded destination register index, i.e.
 *                      with any REX.B extension already applied by the caller.
 */
FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U16_CONST(iFixedReg, u16Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U32_CONST(iFixedReg, u32Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64_CONST(iFixedReg, u64Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8017
8018
/**
 * @opcode 0xb8
 * mov rAX,Iv - with REX.B the destination becomes r8.
 */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
8027
8028
/**
 * @opcode 0xb9
 * mov rCX,Iv - with REX.B the destination becomes r9.
 */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
8037
8038
/**
 * @opcode 0xba
 * mov rDX,Iv - with REX.B the destination becomes r10.
 */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
8047
8048
/**
 * @opcode 0xbb
 * mov rBX,Iv - with REX.B the destination becomes r11.
 */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
8057
8058
/**
 * @opcode 0xbc
 * mov rSP,Iv - with REX.B the destination becomes r12.
 */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
8067
8068
/**
 * @opcode 0xbd
 * mov rBP,Iv - with REX.B the destination becomes r13.
 */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
8077
8078
/**
 * @opcode 0xbe
 * mov rSI,Iv - with REX.B the destination becomes r14.
 */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
8087
8088
/**
 * @opcode 0xbf
 * mov rDI,Iv - with REX.B the destination becomes r15.
 */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
8097
8098
/**
 * @opcode 0xc0
 *
 * Group 2 - rotates and shifts of Eb by an immediate count.  The ModRM reg
 * field selects the operation; /6 is not assigned and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are left undefined by these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,   pu8Dst,            0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *,  pEFlags,           2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory - note the one remaining immediate byte when calculating the
           effective address, fetched just afterwards. */
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_186, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);

        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_LOCAL(uint8_t, bUnmapInfo);
        IEM_MC_ARG(uint8_t *,   pu8Dst,            0);
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

        IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8161
8162
/**
 * @opcode 0xc1
 *
 * Group 2 - rotates and shifts of Ev by an immediate count.  The ModRM reg
 * field selects the operation; /6 is not assigned and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are left undefined by these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half in long mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory - note the one remaining immediate byte when calculating the
           effective address, fetched just afterwards. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);

                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);

                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);

                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8312
8313
/**
 * @opcode 0xc2
 * retn Iw - near return, popping Iw additional bytes off the stack.
 * Deferred to a C implementation selected by effective operand size.
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_16, u16Imm);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_32, u16Imm);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_64, u16Imm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8337
8338
/**
 * @opcode 0xc3
 * retn - plain near return.  Deferred to a C implementation selected by
 * effective operand size.
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_16);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_32);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_64);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8361
8362
/**
 * @opcode 0xc4
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LES instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode.  VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm) )
    {
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            if ((bVex2 & 0x80 /* VEX.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* The VEX payload bits are stored inverted; un-invert while unpacking. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;

            /* VEX.mmmmm selects the opcode map. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        Log(("VEX3: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
8432
8433
/**
 * @opcode 0xc5
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* The VEX payload bits are stored inverted; un-invert while unpacking.
               The 2-byte form always implies the 0x0f opcode map. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bRm & 0x3;

#ifdef IEM_WITH_VEX
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
8477
8478
/**
 * @opcode 0xc6
 * Group 11: only /0 (mov Eb,Ib) is defined; other reg values raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access - note the one remaining immediate byte when
           calculating the effective address. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8512
8513
/**
 * @opcode 0xc7
 * Group 11: only /0 (mov Ev,Iz) is defined; other reg values raise \#UD.
 * In 64-bit mode the immediate is 32 bits, sign-extended to 64.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access - note the size of the remaining immediate (2 or 4
           bytes) when calculating the effective address. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8601
8602
8603
8604
/**
 * @opcode 0xc8
 * enter Iw,Ib - deferred to a C implementation; rSP and rBP are the
 * registers modified (186+ instruction).
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(0,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
                                iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
8621
8622
/**
 * @opcode 0xc9
 * leave - deferred to a C implementation; rSP and rBP are the registers
 * modified (186+ instruction).
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(0,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
                                iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
8637
8638
/**
 * @opcode 0xca
 * retf Iw - far return popping Iw extra bytes.  Deferred to a C
 * implementation; the register list covers rSP plus the data segment
 * registers (selector, base, limit and attributes), which a privilege
 * level change may reload.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
8668
8669
/**
 * @opcode 0xcb
 * retf - plain far return, i.e. the Iw form with a zero byte count.
 * Same deferral and register list as iemOp_retf_Iw.
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
8698
8699
/**
 * @opcode 0xcc
 *
 * Breakpoint instruction (int3).  Defers to iemCImpl_int with vector
 * X86_XCPT_BP.  IEM_CIMPL_F_END_TB forces the translation block to end
 * here; the flush mask is zero (iemCImpl_int handles its own state).
 */
FNIEMOP_DEF(iemOp_int3)
{
    IEMOP_MNEMONIC(int3, "int3");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
}
8711
8712
/**
 * @opcode 0xcd
 *
 * Software interrupt with an 8-bit immediate vector (int Ib).  Defers to
 * iemCImpl_int with IEMINT_INTN; the UINT64_MAX flush mask conservatively
 * flushes all shadowed guest registers.
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    IEMOP_MNEMONIC(int_Ib, "int Ib");
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS, UINT64_MAX,
                                iemCImpl_int, u8Int, IEMINT_INTN);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
8726
8727
/**
 * @opcode 0xce
 *
 * Interrupt on overflow (into) - raises \#OF (vector X86_XCPT_OF) if
 * EFLAGS.OF is set.  Not encodable in 64-bit mode, hence
 * IEMOP_HLP_NO_64BIT.  Conditional branch flavour of iemCImpl_int.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_BRANCH_CONDITIONAL | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                UINT64_MAX,
                                iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
8741
8742
/**
 * @opcode 0xcf
 *
 * Interrupt return (iret/iretd/iretq).  Defers to iemCImpl_iret with the
 * effective operand size; checks for pending IRQs before executing
 * (IEM_CIMPL_F_CHECK_IRQ_BEFORE) since iret may re-enable interrupts.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Flush mask: rSP plus all four shadow copies (selector, base, limit,
       attributes) of each data segment register - see the note below. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
                                iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
    /* Segment registers are sanitized when returning to an outer ring, or fully
       reloaded when returning to v86 mode. Thus the large flush list above. */
}
8773
8774
/**
 * @opcode 0xd0
 *
 * Group 2 rotate/shift of a byte operand (Eb) by an implicit count of 1.
 * The ModR/M reg field selects the operation; /6 is undefined (\#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are left undefined by (some of) these operations on real CPUs. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEM_MC_BEGIN(3, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the byte read-write, shift in place, commit. */
        IEM_MC_BEGIN(3, 3, 0, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,   bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8832
8833
8834
/**
 * @opcode 0xd1
 *
 * Group 2 rotate/shift of a word/dword/qword operand (Ev) by an implicit
 * count of 1.  The ModR/M reg field selects the operation; /6 is \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are left undefined by (some of) these operations on real CPUs. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register - one case per effective operand size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit ops zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory - map read-write, shift in place, commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,   2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,   2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,   2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8973
8974
/**
 * @opcode 0xd2
 *
 * Group 2 rotate/shift of a byte operand (Eb) by the count in CL.
 * The ModR/M reg field selects the operation; /6 is \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF and AF are left undefined by (some of) these operations on real CPUs. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEM_MC_BEGIN(3, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,   pu8Dst,    0);
        IEM_MC_ARG(uint8_t,     cShiftArg, 1);
        IEM_MC_ARG(uint32_t *,  pEFlags,   2);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory: fetch CL, map the byte read-write, shift in place, commit. */
        IEM_MC_BEGIN(3, 3, 0, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,   bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9034
9035
/**
 * @opcode 0xd3
 *
 * Group 2 rotate/shift of a word/dword/qword operand (Ev) by the count
 * in CL.  The ModR/M reg field selects the operation; /6 is \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are left undefined by (some of) these operations on real CPUs. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register - one case per effective operand size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit ops zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory - fetch CL, map read-write, shift in place, commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9180
/**
 * @opcode 0xd4
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef cf,af,of
 *
 * ASCII adjust AX after multiply (aam Ib).  Not encodable in 64-bit mode.
 * Defers to iemCImpl_aam; only the xAX shadow copy needs flushing.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
/** @todo testcase: aam */
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    /* The immediate is the divisor, so a zero immediate raises #DE. */
    if (!bImm)
        IEMOP_RAISE_DIVIDE_ERROR_RET();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aam, bImm);
}
9197
9198
/**
 * @opcode 0xd5
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef cf,af,of
 *
 * ASCII adjust AX before division (aad Ib).  Not encodable in 64-bit
 * mode.  Unlike aam, no divide-error check is needed (the immediate is a
 * multiplier here).  Defers to iemCImpl_aad; only xAX needs flushing.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
/** @todo testcase: aad? */
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aad, bImm);
}
9213
9214
/**
 * @opcode 0xd6
 *
 * Undocumented SALC instruction: AL := CF ? 0xff : 0x00.
 * Not encodable in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9233
9234
/**
 * @opcode 0xd7
 *
 * Table lookup translation (xlat): AL := [iEffSeg : rBX + zero-extended AL].
 * One case per effective address size; the address-sized fetch macros
 * (FETCH_MEM16/32/_U8) take care of any address wrap-around semantics.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9285
9286
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * Raises the usual device-not-available/FPU exceptions first; if either
 * register is empty, records a stack underflow in the FSW instead of
 * calling the implementation.
 *
 * @param   bRm         Mod R/M byte (RM field selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,       FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9316
9317
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags.
 *
 * Like iemOpHlpFpu_st0_stN, but the implementation only produces an FSW
 * value; no register is written.  Underflow is reported with UINT8_MAX
 * (no destination register).
 *
 * @param   bRm         Mod R/M byte (RM field selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,             2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9347
9348
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping when done.
 *
 * Same as iemOpHlpFpuNoStore_st0_stN, except the register stack is popped
 * after the FSW update (also on the underflow path).
 *
 * @param   bRm         Mod R/M byte (RM field selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,             2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9378
9379
/** Opcode 0xd8 11/0.  fadd st0,stN: ST0 := ST0 + STn, stored in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
9386
9387
/** Opcode 0xd8 11/1.  fmul st0,stN: ST0 := ST0 * STn, stored in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
9394
9395
/** Opcode 0xd8 11/2.  fcom st0,stN: compare ST0 with STn, FSW flags only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
9402
9403
/** Opcode 0xd8 11/3.  fcomp st0,stN: like fcom but pops the stack after
 *  updating FSW (same assembly worker, popping MC helper). */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
9410
9411
/** Opcode 0xd8 11/4.  fsub st0,stN: ST0 := ST0 - STn, stored in ST0. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
9418
9419
/** Opcode 0xd8 11/5.  fsubr st0,stN: reversed subtract, result in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
9426
9427
/** Opcode 0xd8 11/6.  fdiv st0,stN: ST0 := ST0 / STn, stored in ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
9434
9435
/** Opcode 0xd8 11/7.  fdivr st0,stN: reversed divide, result in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
9442
9443
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * Fetches the 32-bit real operand from memory after the FPU availability
 * checks; if ST0 is empty, records a stack underflow instead of calling
 * the implementation.
 *
 * @param   bRm         Mod R/M byte (encodes the memory operand).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9479
9480
/** Opcode 0xd8 !11/0.  fadd st0,m32r: ST0 := ST0 + m32 real operand. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
9487
9488
/** Opcode 0xd8 !11/1.  fmul st0,m32r: ST0 := ST0 * m32 real operand. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
9495
9496
/** Opcode 0xd8 !11/2.  fcom st0,m32r: compare ST0 with an m32 real
 *  operand; only FSW is updated (open-coded since the common worker
 *  stores a result while this one only needs the FSW). */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9528
9529
/** Opcode 0xd8 !11/3.  fcomp st0,m32r: like fcom st0,m32r but pops the
 *  register stack after updating FSW (also on the underflow path). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9561
9562
/** Opcode 0xd8 !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    /* ST0 = ST0 - m32r, via the common ST0-op-m32r worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    /* Reversed subtract (operands swapped relative to fsub). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    /* ST0 = ST0 / m32r. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    /* Reversed divide (operands swapped relative to fdiv). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
9593
9594
/**
 * @opcode 0xd8
 *
 * First FPU escape byte: dispatches on the mod R/M byte, either to the
 * register forms (ST0 op ST(i)) or the memory forms (ST0 op m32 real).
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Latch the 11-bit FPU opcode (low 3 opcode bits + modrm) for later FOP updates. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register operand forms. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory operand forms (m32 real). */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9634
9635
/** Opcode 0xd9 /0 mem32real
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    /* Load a 32-bit real from memory, convert it to R80 and push it. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 is the slot that becomes the new ST0 after the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Destination slot occupied: stack overflow. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9666
9667
/** Opcode 0xd9 !11/2 mem32real */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    /* Store ST0 to memory as a 32-bit real (no pop). */
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination for writing before looking at the register stack. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: with IM masked store a negative QNaN, otherwise discard
           the mapping; either way raise stack underflow in FSW. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9705
9706
/** Opcode 0xd9 !11/3 */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    /* Same as fst m32r, but pops ST0 afterwards (THEN_POP macro variants). */
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: QNaN store when IM is masked, else rollback; then underflow + pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9744
9745
/** Opcode 0xd9 !11/4 */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    /* Loads the FPU environment (14 or 28 bytes depending on operand size);
       the heavy lifting is done by the C implementation iemCImpl_fldenv. */
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
9763
9764
9765/** Opcode 0xd9 !11/5 */
9766FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
9767{
9768 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
9769 IEM_MC_BEGIN(1, 1, 0, 0);
9770 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9771 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9772
9773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9774 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9775 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9776
9777 IEM_MC_ARG(uint16_t, u16Fsw, 0);
9778 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9779
9780 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, 0, iemCImpl_fldcw, u16Fsw);
9781 IEM_MC_END();
9782}
9783
9784
/** Opcode 0xd9 !11/6 */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    /* Stores the FPU environment (14 or 28 bytes depending on operand size)
       without checking for pending exceptions (the "no-wait" form). */
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0xd9 !11/7 */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    /* Stores the current FPU control word to a 2-byte memory operand. */
    IEM_MC_BEGIN(2, 0, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9820
9821
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    /* FPU no-operation: no register or flag changes, but FPU availability is
       still checked and the FPU opcode/IP are updated like a real FPU insn. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     * intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9837
9838
/** Opcode 0xd9 11/0 stN */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    /* Pushes a copy of ST(i) onto the stack. */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        /* Source register valid: push its value with a zero FSW contribution. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9863
9864
/** Opcode 0xd9 11/3 stN */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    /* Exchanges ST0 and ST(i).  The empty-register case is handed off to a
       C implementation (iemCImpl_fxch_underflow). */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        /* Swap: ST(i) value goes into ST0 (via FpuRes), old ST0 into ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9893
9894
/** Opcode 0xd9 11/4, 0xdd 11/2. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        /* Destination is ST0 itself: no copy needed, just pop (or flag underflow). */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* General case: copy ST0 into ST(i), then pop the stack. */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9941
9942
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* ST0 valid: run the unary op and write the result back to ST0. */
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xd9 0xe0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    /* Change sign of ST0. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    /* Absolute value of ST0. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
9986
9987
/** Opcode 0xd9 0xe4. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    /* Compares ST0 against 0.0, setting only the FSW condition flags. */
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xd9 0xe5. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    /* Classifies the value in ST0 into the FSW condition flags.  Unlike the
       other ST0 ops there is no empty-register branch here: the register is
       referenced unconditionally, since fxam must classify empty registers too. */
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10033
10034
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEM_MC_BEGIN(1, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 is the slot that becomes the new ST0 after the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xd9 0xe8. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    /* Push +1.0. */
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9. */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    /* Push log2(10). */
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea. */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    /* Push log2(e). */
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    /* Push pi. */
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec. */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    /* Push log10(2). */
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed. */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    /* Push ln(2). */
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    /* Push +0.0. */
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
10114
10115
/** Opcode 0xd9 0xf0.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    /* ST0 = 2^ST0 - 1; plain ST0-replacing unary operation. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
10129
10130
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* First operand is ST(i) (the destination), second is ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xd9 0xf1. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    /* ST1 := ST1 * log2(ST0), then pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
10169
10170
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Two results: first replaces ST0, second is pushed on top. */
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xd9 0xf2. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    /* Replaces ST0 with its partial tangent and pushes a second value. */
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    /* Partial arctangent: result goes to ST1, then the stack is popped. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    /* Splits ST0 into exponent (replaces ST0) and significand (pushed). */
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    /* IEEE partial remainder; result stays in ST0 (no pop). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
10230
10231
/** Opcode 0xd9 0xf6. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    /* Rotates the stack top pointer down by one; register tags are untouched. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0xd9 0xf7. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    /* Rotates the stack top pointer up by one; register tags are untouched. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10274
10275
/** Opcode 0xd9 0xf8. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    /* Partial remainder (truncating); result stays in ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}


/** Opcode 0xd9 0xf9. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    /* ST1 := ST1 * log2(ST0 + 1), then pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}


/** Opcode 0xd9 0xfa. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    /* Square root of ST0. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}


/** Opcode 0xd9 0xfb. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    /* Replaces ST0 with sine and pushes cosine (two-result worker). */
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}


/** Opcode 0xd9 0xfc. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    /* Rounds ST0 to integer according to the current rounding mode. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}


/** Opcode 0xd9 0xfd. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    /* Scales ST0 by ST1; result stays in ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}


/** Opcode 0xd9 0xfe. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    /* Sine of ST0. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}


/** Opcode 0xd9 0xff. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    /* Cosine of ST0. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
10338
10339
/** Used by iemOp_EscF1.
 * Dispatch table for the 0xd9 register forms 0xe0..0xff, indexed by
 * bRm - 0xe0 (see the /4../7 cases in iemOp_EscF1). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
10376
10377
/**
 * @opcode 0xd9
 *
 * Second FPU escape byte: register forms include fld/fxch/fstp and the
 * 0xe0-0xff extension table; memory forms are m32 real load/store plus
 * environment/control-word accesses.
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Latch the 11-bit FPU opcode (low 3 opcode bits + modrm) for later FOP updates. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* 0xe0..0xff: dispatch via the extension table. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10422
10423
/** Opcode 0xda 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    /* Copies ST(i) to ST0 when CF is set (below). */
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xda 11/1. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    /* Copies ST(i) to ST0 when ZF is set (equal). */
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xda 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    /* Copies ST(i) to ST0 when CF or ZF is set (below or equal). */
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xda 11/3. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    /* Copies ST(i) to ST0 when PF is set (unordered comparison result). */
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10526
10527
/**
 * Common worker for FPU instructions working on ST0 and ST1, only affecting
 * flags, and popping twice when done.
 *
 * The assembly worker receives references to ST0 and ST1 and writes only the
 * FSW; no result value is stored back to the register stack.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw); /* FSW output produced by the worker */
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Requires both ST0 and ST1 to be non-empty; underflow pops twice as well. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10557
10558
/** Opcode 0xda 0xe9.
 * FUCOMPP: unordered compare of ST0 with ST1, then pop both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
10565
10566
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * Fetches the 32-bit signed integer operand from memory, then calls the
 * assembly worker with ST0 and the integer; the result is written back to ST0.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Memory operand is fetched before touching the FPU state. */
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10602
10603
/** Opcode 0xda !11/0.
 * FIADD m32i: ST0 += (signed 32-bit integer memory operand). */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
10610
10611
/** Opcode 0xda !11/1.
 * FIMUL m32i: ST0 *= (signed 32-bit integer memory operand). */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
10618
10619
/** Opcode 0xda !11/2.
 * FICOM st0,m32i: compares ST0 with a signed 32-bit integer memory operand,
 * updating only the FSW (no value stored, no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw); /* FSW output from the compare worker */
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10651
10652
/** Opcode 0xda !11/3.
 * FICOMP st0,m32i: same compare as FICOM (shares iemAImpl_ficom_r80_by_i32)
 * but pops ST0 afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw); /* FSW output from the compare worker */
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10684
10685
/** Opcode 0xda !11/4.
 * FISUB m32i: ST0 -= (signed 32-bit integer memory operand). */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
10692
10693
/** Opcode 0xda !11/5.
 * FISUBR m32i: ST0 = m32i - ST0 (reversed subtraction). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
10700
10701
/** Opcode 0xda !11/6.
 * FIDIV m32i: ST0 /= (signed 32-bit integer memory operand). */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
10708
10709
/** Opcode 0xda !11/7.
 * FIDIVR m32i: ST0 = m32i / ST0 (reversed division). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
10716
10717
/**
 * @opcode 0xda
 *
 * FPU escape 0xda: dispatches on the mod R/M byte.  Register mode covers the
 * FCMOVcc group and FUCOMPP (0xe9 only); memory mode covers the 32-bit
 * integer arithmetic/compare group.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of the escape byte + mod R/M)
       so workers can store it in FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp); /* only 0xe9 is valid in this group */
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10759
10760
/** Opcode 0xdb !11/0.
 * FILD m32i: converts a signed 32-bit integer memory operand to R80 and
 * pushes it onto the FPU stack (target slot is current ST(7), i.e. ST(-1)). */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Push target (register 7 below the current top) must be empty, else
       it's a stack (push) overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10791
10792
/** Opcode 0xdb !11/1.
 * FISTTP m32i: stores ST0 to memory as a signed 32-bit integer with
 * truncation, then pops.  On a masked-IM stack underflow the integer
 * indefinite (INT32_MIN) is stored instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination for writing before looking at the stack. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit is conditional on the FSW the worker produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: store integer indefinite only if the invalid-op
           exception is masked, otherwise roll the mapping back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10830
10831
/** Opcode 0xdb !11/2.
 * FIST m32i: stores ST0 to memory as a signed 32-bit integer (rounding per
 * FCW), no pop.  On a masked-IM stack underflow the integer indefinite
 * (INT32_MIN) is stored instead. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination for writing before looking at the stack. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit is conditional on the FSW the worker produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: store integer indefinite only if the invalid-op
           exception is masked, otherwise roll the mapping back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10869
10870
/** Opcode 0xdb !11/3.
 * FISTP m32i: like FIST m32i (same iemAImpl_fist_r80_to_i32 worker) but pops
 * ST0 afterwards. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination for writing before looking at the stack. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit is conditional on the FSW the worker produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: store integer indefinite only if the invalid-op
           exception is masked, otherwise roll the mapping back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10908
10909
/** Opcode 0xdb !11/5.
 * FLD m80r: loads an 80-bit real from memory and pushes it onto the FPU
 * stack (push target is current ST(7)). */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Push target must be empty, else it's a stack (push) overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10940
10941
/** Opcode 0xdb !11/7.
 * FSTP m80r: stores ST0 to memory as an 80-bit real and pops.  On a
 * masked-IM stack underflow a negative QNaN is stored instead. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination for writing before looking at the stack. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_MEM_MAP_R80_WO(pr80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        /* Commit is conditional on the FSW the worker produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: store the real indefinite (negative QNaN) only if the
           invalid-op exception is masked, otherwise roll the mapping back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10979
10980
/** Opcode 0xdb 11/0.
 * FCMOVNB st0,stN: copies ST(N) into ST(0) when CF is clear ("not below");
 * underflows when either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN); /* reference to the ST(N) source register */

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(N) must be non-empty, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) { /* 'nb' condition: CF=0 */
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11005
11006
/** Opcode 0xdb 11/1.
 * FCMOVNE st0,stN: copies ST(N) into ST(0) when ZF is clear ("not equal");
 * underflows when either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN); /* reference to the ST(N) source register */

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(N) must be non-empty, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { /* 'ne' condition: ZF=0 */
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11031
11032
/** Opcode 0xdb 11/2.
 * FCMOVNBE st0,stN: copies ST(N) into ST(0) when both CF and ZF are clear
 * ("not below or equal"); underflows when either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN); /* reference to the ST(N) source register */

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(N) must be non-empty, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) { /* 'nbe' condition: CF=0 and ZF=0 */
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11057
11058
/** Opcode 0xdb 11/3.
 * FCMOVNU st0,stN: copies ST(N) into ST(0) when PF is clear ("not
 * unordered"); underflows when either register is empty.
 * @note The function/mnemonic identifiers use a doubled 'n' ("fcmovnnu");
 *       kept as-is since the EscF3 dispatcher references this name. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN); /* reference to the ST(N) source register */

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(N) must be non-empty, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) { /* 'nu' condition: PF=0 */
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11083
11084
/** Opcode 0xdb 0xe0.
 * FNENI: 8087-only interrupt enable; treated as a no-op (decode, possible
 * \#NM, advance RIP) on later CPUs. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11095
11096
/** Opcode 0xdb 0xe1.
 * FNDISI: 8087-only interrupt disable; treated as a no-op (decode, possible
 * \#NM, advance RIP) on later CPUs. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11107
11108
/** Opcode 0xdb 0xe2.
 * FNCLEX: clears the exception bits in the FSW without checking for pending
 * FPU exceptions first (the no-wait form). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* we modify FSW, so bring state in for writing */
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11121
11122
/** Opcode 0xdb 0xe3.
 * FNINIT: re-initializes the FPU; deferred to the C implementation with
 * exception checking disabled (no-wait form). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, 0, iemCImpl_finit, false /*fCheckXcpts*/);
}
11130
11131
/** Opcode 0xdb 0xe4.
 * FNSETPM: 80287-only "set protected mode on FPU"; treated as a no-op
 * (decode, possible \#NM, advance RIP) on later CPUs. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11142
11143
/** Opcode 0xdb 0xe5.
 * FRSTPM: 80287XL-only "reset protected mode"; raises \#UD here since newer
 * CPUs treat it as an invalid opcode (the no-op variant is compiled out). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
11159
11160
/** Opcode 0xdb 11/5.
 * FUCOMI st0,stN: unordered compare setting EFLAGS; deferred to the shared
 * fcomi/fucomi C implementation.  The pop flag is OR'ed into the same
 * argument as the FPU opcode word (here 0 = no pop). */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11169
11170
/** Opcode 0xdb 11/6.
 * FCOMI st0,stN: ordered compare setting EFLAGS; deferred to the shared
 * fcomi/fucomi C implementation.  The pop flag is OR'ed into the same
 * argument as the FPU opcode word (here false = no pop). */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11179
11180
/**
 * @opcode 0xdb
 *
 * FPU escape 0xdb: dispatches on the mod R/M byte.  Register mode covers the
 * FCMOVNcc group, the FNENI/FNDISI/FNCLEX/FNINIT/FNSETPM/FRSTPM control
 * group (reg=4, selected by the full bRm value), and FUCOMI/FCOMI; memory
 * mode covers the 32-bit integer load/store group plus the 80-bit real
 * load/store.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of the escape byte + mod R/M)
       so workers can store it in FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* Control instructions are distinguished by the whole byte. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
                    case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached; pacifies the compiler */
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11232
11233
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * ST(N) is operand 1 (and the destination), ST0 is operand 2 — the reversed
 * operand order used by the 0xdc register-mode forms.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Requires both ST(N) and ST0 to be non-empty. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11264
11265
/** Opcode 0xdc 11/0.
 * FADD stN,st0: ST(N) = ST(N) + ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
11272
11273
/** Opcode 0xdc 11/1.
 * FMUL stN,st0: ST(N) = ST(N) * ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
11280
11281
/** Opcode 0xdc 11/4.
 * FSUBR stN,st0 (note: sub/subr swap roles vs the 0xd8 forms). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
11288
11289
/** Opcode 0xdc 11/5.
 * FSUB stN,st0 (note: sub/subr swap roles vs the 0xd8 forms). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
11296
11297
/** Opcode 0xdc 11/6.
 * FDIVR stN,st0 (note: div/divr swap roles vs the 0xd8 forms). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
11304
11305
/** Opcode 0xdc 11/7.
 * FDIV stN,st0 (note: div/divr swap roles vs the 0xd8 forms). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
11312
11313
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * Fetches the 64-bit real operand from memory, then calls the assembly worker
 * with ST0 and the operand; the result is written back to ST0.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Memory operand is fetched before touching the FPU state. */
    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11348
11349
/** Opcode 0xdc !11/0.
 * FADD m64r: ST0 += (64-bit real memory operand). */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
11356
11357
/** Opcode 0xdc !11/1.
 * FMUL m64r: ST0 *= (64-bit real memory operand). */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
11364
11365
/** Opcode 0xdc !11/2.
 * FCOM st0,m64r: compares ST0 with a 64-bit real memory operand, updating
 * only the FSW (no value stored, no pop). */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw); /* FSW output from the compare worker */
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11397
11398
/** Opcode 0xdc !11/3.
 * FCOMP st0,m64r: same compare as FCOM (shares iemAImpl_fcom_r80_by_r64)
 * but pops ST0 afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw); /* FSW output from the compare worker */
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11430
11431
/** Opcode 0xdc !11/4.
 * FSUB m64real: ST(0) = ST(0) - m64real (via the common ST0/m64r helper). */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
11438
11439
/** Opcode 0xdc !11/5.
 * FSUBR m64real: ST(0) = m64real - ST(0) (reversed-operand subtract). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
11446
11447
/** Opcode 0xdc !11/6.
 * FDIV m64real: ST(0) = ST(0) / m64real. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
11454
11455
/** Opcode 0xdc !11/7.
 * FDIVR m64real: ST(0) = m64real / ST(0) (reversed-operand divide). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
11462
11463
11464/**
11465 * @opcode 0xdc
11466 */
11467FNIEMOP_DEF(iemOp_EscF4)
11468{
11469 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11470 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
11471 if (IEM_IS_MODRM_REG_MODE(bRm))
11472 {
11473 switch (IEM_GET_MODRM_REG_8(bRm))
11474 {
11475 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
11476 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
11477 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
11478 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
11479 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
11480 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
11481 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
11482 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
11483 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11484 }
11485 }
11486 else
11487 {
11488 switch (IEM_GET_MODRM_REG_8(bRm))
11489 {
11490 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
11491 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
11492 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
11493 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
11494 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
11495 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
11496 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
11497 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
11498 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11499 }
11500 }
11501}
11502
11503
/** Opcode 0xdd !11/0.
 * FLD m64real: convert the 64-bit real memory operand to R80 and push it
 * onto the FPU stack; pushes overflow indefinite if ST(7) isn't free.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) { /* Room for the push (ST(7) relative to current TOP). */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11534
11535
/** Opcode 0xdd !11/1.
 * FISTTP m64int (SSE3): store ST(0) to memory as a 64-bit integer using
 * truncation (round towards zero) regardless of FCW.RC, then pop ST(0).
 * On stack underflow with FCW.IM set, the integer indefinite is stored. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw); /* Commit only if FSW allows the store. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() { /* ST(0) empty: masked -> store integer indefinite; unmasked -> no store. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11573
11574
/** Opcode 0xdd !11/2.
 * FST m64real: store ST(0) to memory as a 64-bit real; the stack is not
 * popped.  On stack underflow with FCW.IM set, negative QNaN is stored. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw); /* Commit only if FSW allows the store. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() { /* ST(0) empty: masked -> store -QNaN; unmasked -> no store. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11612
11613
11614
11615
/** Opcode 0xdd !11/3.
 * FSTP m64real: like FST m64real but pops ST(0) afterwards (note the
 * *_THEN_POP FSW/underflow updaters). */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw); /* Commit only if FSW allows the store. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() { /* ST(0) empty: masked -> store -QNaN; unmasked -> no store. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11653
11654
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte: restore the full FPU state from memory; deferred to
 * the C implementation (iemCImpl_frstor), operand size selects the layout. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
11672
11673
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte: save the full FPU state to memory; deferred to the C
 * implementation (iemCImpl_fnsave), operand size selects the layout. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
11691
/** Opcode 0xdd !11/7.
 * FNSTSW m16: store the FPU status word to a 16-bit memory operand without
 * checking for pending FPU exceptions (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2, 0, 0);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
11715
11716
/** Opcode 0xdd 11/0.
 * FFREE ST(i): mark the register's tag as empty without changing TOP or the
 * register contents. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
             unmodified. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11736
11737
/** Opcode 0xdd 11/2.
 * FST ST(i): copy ST(0) into ST(i); underflow is signalled if ST(0) is empty. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11760
11761
/** Opcode 0xdd 11/4.
 * FUCOM ST(0),ST(i): unordered compare, no store, no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
11768
11769
/** Opcode 0xdd 11/5.
 * FUCOMP ST(0),ST(i): unordered compare, no store, pops ST(0). */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
11776
11777
11778/**
11779 * @opcode 0xdd
11780 */
11781FNIEMOP_DEF(iemOp_EscF5)
11782{
11783 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11784 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
11785 if (IEM_IS_MODRM_REG_MODE(bRm))
11786 {
11787 switch (IEM_GET_MODRM_REG_8(bRm))
11788 {
11789 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
11790 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
11791 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
11792 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
11793 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
11794 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
11795 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11796 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11797 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11798 }
11799 }
11800 else
11801 {
11802 switch (IEM_GET_MODRM_REG_8(bRm))
11803 {
11804 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
11805 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
11806 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
11807 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
11808 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
11809 case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
11810 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
11811 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
11812 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11813 }
11814 }
11815}
11816
11817
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0): ST(i) = ST(i) + ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
11824
11825
/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0): ST(i) = ST(i) * ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
11832
11833
/** Opcode 0xde 0xd9.
 * FCOMPP: compare ST(0) with ST(1), then pop both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
11840
11841
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0): ST(i) = ST(0) - ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
11848
11849
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0): ST(i) = ST(i) - ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
11856
11857
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0): ST(i) = ST(0) / ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
11864
11865
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0): ST(i) = ST(i) / ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
11872
11873
11874/**
11875 * Common worker for FPU instructions working on ST0 and an m16i, and storing
11876 * the result in ST0.
11877 *
11878 * @param bRm Mod R/M byte.
11879 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11880 */
11881FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
11882{
11883 IEM_MC_BEGIN(3, 3, 0, 0);
11884 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11885 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11886 IEM_MC_LOCAL(int16_t, i16Val2);
11887 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11888 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11889 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
11890
11891 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11893
11894 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11895 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11896 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11897
11898 IEM_MC_PREPARE_FPU_USAGE();
11899 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11900 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
11901 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
11902 } IEM_MC_ELSE() {
11903 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11904 } IEM_MC_ENDIF();
11905 IEM_MC_ADVANCE_RIP_AND_FINISH();
11906
11907 IEM_MC_END();
11908}
11909
11910
/** Opcode 0xde !11/0.
 * FIADD m16int: ST(0) = ST(0) + m16int. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
11917
11918
/** Opcode 0xde !11/1.
 * FIMUL m16int: ST(0) = ST(0) * m16int. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
11925
11926
/** Opcode 0xde !11/2.
 * FICOM m16int: compare ST(0) with a 16-bit integer memory operand, setting
 * C0/C2/C3 in FSW; the stack is left unchanged. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() { /* ST(0) empty -> stack underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11958
11959
/** Opcode 0xde !11/3.
 * FICOMP m16int: same compare as FICOM m16int, but pops ST(0) afterwards
 * (note the *_THEN_POP FSW/underflow updaters). */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() { /* ST(0) empty -> stack underflow, still pops. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11991
11992
/** Opcode 0xde !11/4.
 * FISUB m16int: ST(0) = ST(0) - m16int. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
11999
12000
/** Opcode 0xde !11/5.
 * FISUBR m16int: ST(0) = m16int - ST(0) (reversed-operand subtract). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
12007
12008
/** Opcode 0xde !11/6.
 * FIDIV m16int: ST(0) = ST(0) / m16int. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
12015
12016
/** Opcode 0xde !11/7.
 * FIDIVR m16int: ST(0) = m16int / ST(0) (reversed-operand divide). */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
12023
12024
12025/**
12026 * @opcode 0xde
12027 */
12028FNIEMOP_DEF(iemOp_EscF6)
12029{
12030 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12031 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
12032 if (IEM_IS_MODRM_REG_MODE(bRm))
12033 {
12034 switch (IEM_GET_MODRM_REG_8(bRm))
12035 {
12036 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
12037 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
12038 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
12039 case 3: if (bRm == 0xd9)
12040 return FNIEMOP_CALL(iemOp_fcompp);
12041 IEMOP_RAISE_INVALID_OPCODE_RET();
12042 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
12043 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
12044 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
12045 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
12046 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12047 }
12048 }
12049 else
12050 {
12051 switch (IEM_GET_MODRM_REG_8(bRm))
12052 {
12053 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
12054 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
12055 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
12056 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
12057 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
12058 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
12059 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
12060 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
12061 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12062 }
12063 }
12064}
12065
12066
/** Opcode 0xdf 11/0.
 * Undocument instruction, assumed to work like ffree + fincstp: frees the
 * register's tag and then increments TOP (i.e. pops without storing). */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP(); /* The extra FINCSTP-like step vs. plain FFREE. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12086
12087
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: store the FPU status word into AX without checking for pending
 * FPU exceptions (no-wait form). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12102
12103
12104/** Opcode 0xdf 11/5. */
12105FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
12106{
12107 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
12108 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
12109 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
12110 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
12111}
12112
12113
/** Opcode 0xdf 11/6.
 * FCOMIP ST(0),ST(i): ordered compare setting ZF/PF/CF, then pop.
 * fUCmp=false selects the ordered compare; bit 31 of the third argument
 * requests the pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
12122
12123
/** Opcode 0xdf !11/0.
 * FILD m16int: convert the 16-bit integer memory operand to R80 and push it
 * onto the FPU stack; pushes overflow indefinite if ST(7) isn't free. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) { /* Room for the push (ST(7) relative to current TOP). */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12154
12155
/** Opcode 0xdf !11/1.
 * FISTTP m16int (SSE3): store ST(0) to memory as a 16-bit integer using
 * truncation (round towards zero) regardless of FCW.RC, then pop ST(0).
 * On stack underflow with FCW.IM set, the integer indefinite is stored. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw); /* Commit only if FSW allows the store. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() { /* ST(0) empty: masked -> store integer indefinite; unmasked -> no store. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12193
12194
/** Opcode 0xdf !11/2.
 * FIST m16int: store ST(0) to memory as a 16-bit integer, rounding per
 * FCW.RC; the stack is not popped.  On stack underflow with FCW.IM set, the
 * integer indefinite is stored. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw); /* Commit only if FSW allows the store. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() { /* ST(0) empty: masked -> store integer indefinite; unmasked -> no store. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12232
12233
/** Opcode 0xdf !11/3.
 * FISTP m16int: like FIST m16int but pops ST(0) afterwards (note the
 * *_THEN_POP FSW/underflow updaters). */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw); /* Commit only if FSW allows the store. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() { /* ST(0) empty: masked -> store integer indefinite; unmasked -> no store. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12271
12272
/** Opcode 0xdf !11/4.
 * FBLD m80bcd: convert the 80-bit packed BCD memory operand to R80 and push
 * it onto the FPU stack; pushes overflow indefinite if ST(7) isn't free. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) { /* Room for the push (ST(7) relative to current TOP). */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12303
12304
/** Opcode 0xdf !11/5.
 * FILD m64int: loads a 64-bit signed integer from memory, converts it to
 * extended real and pushes it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    /* Finish decoding (effective address, no LOCK prefix) before raising anything. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) must be empty for the push, otherwise it's an FPU stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12335
12336
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd: stores ST(0) to memory as an 80-bit packed BCD integer and
 * pops the FPU stack.  If ST(0) is empty and the invalid-operation exception
 * is masked (FCW.IM), a BCD indefinite value is stored instead. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination for writing up front; committed or rolled back below. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_MEM_MAP_D80_WO(pd80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        /* Commit is conditional on the FSW the assembly helper returned. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: store indefinite if IM is masked, otherwise write nothing. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12374
12375
/** Opcode 0xdf !11/7.
 * FISTP m64int: converts ST(0) to a 64-bit signed integer, stores it to
 * memory and pops the FPU stack.  If ST(0) is empty and FCW.IM is masked,
 * the integer indefinite value (INT64_MIN) is stored instead. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination for writing up front; committed or rolled back below. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        /* Commit is conditional on the FSW the assembly helper returned. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: store integer indefinite if IM is masked, else write nothing. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12413
12414
/**
 * @opcode 0xdf
 *
 * Escape opcode 0xdf (x87 group 7) dispatcher.  Records the FPU opcode word
 * (FOP) from the ModR/M byte, then dispatches on the reg field: the register
 * forms (mod == 3) and the memory forms take different handler tables.
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Stash the 11-bit FPU opcode for FSTENV/FSAVE & friends. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register forms. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* Only DF E0 (FNSTSW AX) is defined in this group. */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12455
12456
/**
 * @opcode 0xe0
 * @opfltest zf
 *
 * LOOPNE/LOOPNZ Jb: decrements the counter register (CX/ECX/RCX, selected by
 * the effective address size) and takes the short branch when the counter
 * did not start out as 1 (i.e. is non-zero after decrementing) and ZF is
 * clear.  The counter is decremented on both paths.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12511
12512
/**
 * @opcode 0xe1
 * @opfltest zf
 *
 * LOOPE/LOOPZ Jb: like LOOPNE (0xe0), but branches only while ZF is set.
 * The counter register (CX/ECX/RCX by effective address size) is decremented
 * on both paths.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12567
12568
/**
 * @opcode 0xe2
 *
 * LOOP Jb: decrements the counter register (CX/ECX/RCX by effective address
 * size) and takes the short branch while the counter stays non-zero.  Note
 * the not-taken path stores 0 to the counter instead of decrementing - same
 * architectural result, cheaper for the recompiler.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */

    /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
     * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs.  Shortcutting
     * the loop causes guest crashes, but when logging it's nice to skip a few million
     * lines of useless output. */
#if defined(LOG_ENABLED)
    /* LOOP $-2 (branch target is the LOOP itself): when verbose logging is on,
       short-circuit the whole spin by zeroing the counter and falling through. */
    if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
#endif

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NOT_ONE() {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NOT_ONE() {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NOT_ONE() {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12662
12663
/**
 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ Jb: takes the short branch when the counter register
 * (CX/ECX/RCX, selected by the effective address size) is zero.  The counter
 * itself is not modified.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Note the inverted test: non-zero falls through, zero branches. */
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12711
12712
/**
 * @opcode 0xe4
 * @opfltest iopl
 *
 * IN AL,Ib: reads one byte from the immediate-specified I/O port into AL.
 * Deferred to iemCImpl_in; may cause a VM-exit (I/O intercepts) and writes
 * the xAX guest register (liveness annotation).
 */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* 0x80 flags the port as immediate-encoded for the C implementation. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12725
12726
/**
 * @opcode 0xe5
 * @opfltest iopl
 *
 * IN eAX,Ib: reads a word or dword (by effective operand size) from the
 * immediate-specified I/O port into AX/EAX.  Deferred to iemCImpl_in.
 */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Access width: 2 bytes for 16-bit operand size, otherwise 4. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12740
12741
/**
 * @opcode 0xe6
 * @opfltest iopl
 *
 * OUT Ib,AL: writes AL to the immediate-specified I/O port.  Deferred to
 * iemCImpl_out; may cause a VM-exit (I/O intercepts), no guest registers
 * are modified.
 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* 0x80 flags the port as immediate-encoded for the C implementation. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12754
12755
/**
 * @opcode 0xe7
 * @opfltest iopl
 *
 * OUT Ib,eAX: writes AX/EAX (by effective operand size) to the
 * immediate-specified I/O port.  Deferred to iemCImpl_out.
 */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Access width: 2 bytes for 16-bit operand size, otherwise 4. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12769
12770
/**
 * @opcode 0xe8
 *
 * CALL rel16/rel32: near relative call.  Immediate width follows the
 * effective operand size (64-bit mode uses a sign-extended 32-bit
 * displacement); the work is deferred to the iemCImpl_call_rel_NN helpers.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* Sign-extend the 32-bit displacement to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12804
12805
/**
 * @opcode 0xe9
 *
 * JMP rel16/rel32: near relative jump.  The 32-bit form also covers 64-bit
 * mode (the displacement stays 32 bits there; the MC handles the RIP width).
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12835
12836
/**
 * @opcode 0xea
 *
 * JMP ptr16:16/ptr16:32: direct far jump.  Invalid in 64-bit mode.  Decodes
 * the offset (width per effective operand size) and selector, then defers to
 * iemCImpl_FarJmp which handles mode/task/privilege transitions.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* UINT64_MAX: conservatively mark all guest registers as modified. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
                                iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
12858
12859
/**
 * @opcode 0xeb
 *
 * JMP rel8: short relative jump, all modes.
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    IEM_MC_END();
}
12874
12875
/**
 * @opcode 0xec
 * @opfltest iopl
 *
 * IN AL,DX: reads one byte from the I/O port in DX into AL.  Deferred to
 * iemCImpl_in_eAX_DX; modifies the xAX guest register.
 */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12888
12889
/**
 * @opcode 0xed
 * @opfltest iopl
 *
 * IN eAX,DX: reads a word or dword (by effective operand size) from the I/O
 * port in DX into AX/EAX.  Deferred to iemCImpl_in_eAX_DX.
 */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Access width: 2 bytes for 16-bit operand size, otherwise 4. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12903
12904
/**
 * @opcode 0xee
 * @opfltest iopl
 *
 * OUT DX,AL: writes AL to the I/O port in DX.  Deferred to
 * iemCImpl_out_DX_eAX; no guest registers are modified.
 */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12916
12917
/**
 * @opcode 0xef
 * @opfltest iopl
 *
 * OUT DX,eAX: writes AX/EAX (by effective operand size) to the I/O port in
 * DX.  Deferred to iemCImpl_out_DX_eAX.
 */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Access width: 2 bytes for 16-bit operand size, otherwise 4. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12930
12931
/**
 * @opcode 0xf0
 *
 * LOCK prefix: records the prefix in fPrefixes and recursively decodes the
 * next opcode byte via the one-byte dispatch table.  Whether LOCK is legal
 * is checked by the decoded instruction itself.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12943
12944
/**
 * @opcode 0xf1
 *
 * INT1/ICEBP: raises a \#DB via the generic iemCImpl_int software-interrupt
 * path with the IEMINT_INT1 origin (distinct IOPL/gate checks from INT n).
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    /** @todo Does not generate \#UD on 286, or so they say...  Was allegedly a
     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
     * LOADALL memo.  Needs some testing. */
    IEMOP_HLP_MIN_386();
    /** @todo testcase! */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
}
12960
12961
/**
 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix: records the prefix (replacing any earlier REPZ),
 * selects the 4-entry opcode table index, and recursively decodes the next
 * opcode byte.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12979
12980
/**
 * @opcode 0xf3
 *
 * REP/REPE/REPZ prefix: records the prefix (replacing any earlier REPNZ),
 * selects the 4-entry opcode table index, and recursively decodes the next
 * opcode byte.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12998
12999
13000/**
13001 * @opcode 0xf4
13002 */
13003FNIEMOP_DEF(iemOp_hlt)
13004{
13005 IEMOP_MNEMONIC(hlt, "hlt");
13006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13007 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_hlt);
13008}
13009
13010
13011/**
13012 * @opcode 0xf5
13013 * @opflmodify cf
13014 */
13015FNIEMOP_DEF(iemOp_cmc)
13016{
13017 IEMOP_MNEMONIC(cmc, "cmc");
13018 IEM_MC_BEGIN(0, 0, 0, 0);
13019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13020 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
13021 IEM_MC_ADVANCE_RIP_AND_FINISH();
13022 IEM_MC_END();
13023}
13024
13025
/**
 * Body for 'inc/dec/not/neg Eb'.
 *
 * Emits the register form plus both memory forms - the plain
 * read-modify-write one and the LOCK-prefixed atomic one - of a
 * single-operand byte instruction.  @a a_fnNormalU8 / @a a_fnLockedU8 are
 * the non-atomic and atomic assembly worker functions.
 */
#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        /* register access */ \
        IEM_MC_BEGIN(2, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG(uint32_t *, pEFlags, 1); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            /* no LOCK prefix (or LOCK disregarded): plain read-modify-write mapping */ \
            IEM_MC_BEGIN(2, 2, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* LOCK prefix: atomic mapping and the locked assembly worker */ \
            IEM_MC_BEGIN(2, 2, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
13086
13087
/**
 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
 *
 * Emits the register forms and the non-atomic memory forms for all three
 * operand sizes.  NOTE: this macro deliberately ends inside an open
 * 'else {' branch (the locked-memory case) - it MUST be followed by
 * IEMOP_BODY_UNARY_Ev_LOCKED, which supplies that branch and closes the
 * braces.
 */
#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(2, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                /* 32-bit register writes zero the high dword of the 64-bit register. */ \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
13211
/**
 * Locked-memory continuation of IEMOP_BODY_UNARY_Ev: supplies the atomic
 * memory forms for all three operand sizes and closes the braces the former
 * macro left open.  Must immediately follow IEMOP_BODY_UNARY_Ev.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
13277
13278
/**
 * @opmaps      grp3_f6
 * @opcode      /0
 * @opflclass   logical
 * @todo also /1
 *
 * TEST Eb,Ib: ANDs the r/m8 operand with an immediate byte, updating only
 * EFLAGS — the destination is never written, hence the read-only mapping
 * in the memory path and the absence of a LOCK variant.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(3, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 3, 0, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* cbImmAndRspOffset = 1: one immediate byte follows the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);

        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_LOCAL(uint8_t, bUnmapInfo);
        IEM_MC_ARG(uint8_t const *, pu8Dst, 0);
        IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

        IEM_MC_ARG_CONST(uint8_t, u8Src, u8Imm, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
13330
13331
/**
 * Opcode 0xf6 /4, /5, /6 and /7 - common worker for the byte-sized
 * mul/imul/div/idiv forms.
 *
 * The worker gets AX by reference (implicit source and 16-bit destination)
 * and the r/m8 operand by value.  A non-zero return from the assembly
 * implementation signals a \#DE condition (divide error / overflow).
 *
 * @param   bRm     The ModR/M byte.
 * @param   pfnU8   The 8-bit operand-size assembly worker to invoke.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEM_MC_BEGIN(3, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();    /* non-zero rc => #DE */
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2, 0, 0);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();    /* non-zero rc => #DE */
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
13382
13383
/**
 * Opcode 0xf7 /4, /5, /6 and /7 - common worker for the word/dword/qword
 * mul/imul/div/idiv forms.
 *
 * The workers get xAX and xDX by reference (implicit operands) and the Ev
 * operand by value; a non-zero return signals \#DE.  For the 32-bit operand
 * size the high halves of RAX/RDX are explicitly cleared on success, since
 * the worker writes through 32-bit references and cannot do the implicit
 * zero-extension itself.
 *
 * @param   bRm     The ModR/M byte.
 * @param   pImpl   Table with the 16/32/64-bit assembly workers.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();    /* non-zero rc => #DE */
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit GPR writes must zero the high dword in 64-bit mode. */
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();    /* non-zero rc => #DE */
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();    /* non-zero rc => #DE */
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2, 0, 0);
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();    /* non-zero rc => #DE */
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit GPR writes must zero the high dword in 64-bit mode. */
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();    /* non-zero rc => #DE */
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();    /* non-zero rc => #DE */
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13554
13555
/**
 * @opmaps      grp3_f6
 * @opcode      /2
 * @opflclass   unchanged
 *
 * NOT Eb - one's complement of the r/m8 operand; body supplied by the
 * shared unary-Eb macro (plain + locked worker pair).
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
{
/** @todo does not modify EFLAGS. */
    IEMOP_MNEMONIC(not_Eb, "not Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
}
13567
13568
13569/**
13570 * @opmaps grp3_f6
13571 * @opcode /3
13572 * @opflclass arithmetic
13573 */
13574FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
13575{
13576 IEMOP_MNEMONIC(net_Eb, "neg Eb");
13577 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
13578}
13579
13580
/**
 * @opcode      0xf6
 *
 * Group 3 dispatcher for byte operands (test/not/neg/mul/imul/div/idiv),
 * selected by the reg field of the ModR/M byte.  /1 is an undocumented
 * alias of /0 (TEST).
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);  /* undocumented TEST alias */
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
        case 4:
            /**
             * @opdone
             * @opmaps      grp3_f6
             * @opcode      /4
             * @opflclass   multiply
             */
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
        case 5:
            /**
             * @opdone
             * @opmaps      grp3_f6
             * @opcode      /5
             * @opflclass   multiply
             */
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
        case 6:
            /**
             * @opdone
             * @opmaps      grp3_f6
             * @opcode      /6
             * @opflclass   division
             */
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
        case 7:
            /**
             * @opdone
             * @opmaps      grp3_f6
             * @opcode      /7
             * @opflclass   division
             */
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13636
13637
/**
 * @opmaps      grp3_f7
 * @opcode      /0
 * @opflclass   logical
 *
 * TEST Ev,Iv: ANDs the r/m16/32/64 operand with an immediate (sign-extended
 * dword for the 64-bit operand size), updating only EFLAGS.  The destination
 * is never written, so the memory operand is mapped read-only and there is
 * no LOCK variant.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended to 64 bits */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* cbImmAndRspOffset = 2: a word immediate follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint16_t const *, pu16Dst, 0);
                IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* cbImmAndRspOffset = 4: a dword immediate follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint32_t const *, pu32Dst, 0);
                IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* cbImmAndRspOffset = 4: still an imm32 (sign-extended) in 64-bit mode. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);

                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_ARG(uint64_t const *, pu64Dst, 0);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13777
13778
/**
 * @opmaps      grp3_f7
 * @opcode      /2
 * @opflclass   unchanged
 *
 * NOT Ev - one's complement of the r/m16/32/64 operand.  The two body
 * macros form one statement pair: the first handles register + plain
 * memory operands, the second the LOCK-prefixed memory forms.
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
/** @todo does not modify EFLAGS */
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_not_u16,        iemAImpl_not_u32,        iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
13791
13792
/**
 * @opmaps      grp3_f7
 * @opcode      /3
 * @opflclass   arithmetic
 *
 * NEG Ev - two's complement negation of the r/m16/32/64 operand.  The two
 * body macros form one statement pair: plain forms first, LOCKed memory
 * forms second.
 */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_neg_u16,        iemAImpl_neg_u32,        iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
13804
13805
/**
 * @opcode      0xf7
 *
 * Group 3 dispatcher for word/dword/qword operands, selected by the reg
 * field of the ModR/M byte.  /1 is an undocumented alias of /0 (TEST).
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);  /* undocumented TEST alias */
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
        case 4:
            /**
             * @opdone
             * @opmaps      grp3_f7
             * @opcode      /4
             * @opflclass   multiply
             */
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
        case 5:
            /**
             * @opdone
             * @opmaps      grp3_f7
             * @opcode      /5
             * @opflclass   multiply
             */
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
        case 6:
            /**
             * @opdone
             * @opmaps      grp3_f7
             * @opcode      /6
             * @opflclass   division
             */
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
        case 7:
            /**
             * @opdone
             * @opmaps      grp3_f7
             * @opcode      /7
             * @opflclass   division
             */
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13861
13862
/**
 * @opcode      0xf8
 * @opflmodify  cf
 * @opflclear   cf
 *
 * CLC - clears the carry flag; no other flags or state touched.
 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13877
13878
/**
 * @opcode      0xf9
 * @opflmodify  cf
 * @opflset     cf
 *
 * STC - sets the carry flag; no other flags or state touched.
 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13893
13894
/**
 * @opcode      0xfa
 * @opfltest    iopl,vm
 * @opflmodify  if,vif
 *
 * CLI - deferred to a C implementation since it is privilege-sensitive
 * (IOPL/VME checks, possible \#GP) and may cause a VM-exit.
 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, 0, iemCImpl_cli);
}
13906
13907
/**
 * @opcode      0xfb
 * @opfltest    iopl,vm
 * @opflmodify  if,vif
 *
 * STI - deferred to a C implementation; needs the interrupt-inhibit shadow
 * for the following instruction and an IRQ check afterwards.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(  IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
                                | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, 0, iemCImpl_sti);
}
13920
13921
/**
 * @opcode      0xfc
 * @opflmodify  df
 * @opflclear   df
 *
 * CLD - clears the direction flag; no other flags or state touched.
 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13936
13937
/**
 * @opcode      0xfd
 * @opflmodify  df
 * @opflset     df
 *
 * STD - sets the direction flag; no other flags or state touched.
 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13952
13953
/**
 * @opmaps      grp4
 * @opcode      /0
 * @opflclass   incdec
 *
 * INC Eb - byte increment; body supplied by the shared unary-Eb macro
 * (plain + locked worker pair).
 */
FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Eb, "inc Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
}
13964
13965
/**
 * @opmaps      grp4
 * @opcode      /1
 * @opflclass   incdec
 *
 * DEC Eb - byte decrement; body supplied by the shared unary-Eb macro
 * (plain + locked worker pair).
 */
FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Eb, "dec Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
}
13976
13977
/**
 * @opcode      0xfe
 *
 * Group 4 dispatcher (byte inc/dec); reg values 2-7 are invalid and raise
 * \#UD.
 */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
        default:
            /** @todo is the eff-addr decoded? */
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}
13994
/**
 * @opmaps      grp5
 * @opcode      /0
 * @opflclass   incdec
 *
 * INC Ev - word/dword/qword increment.  The two body macros form one
 * statement pair: plain forms first, LOCKed memory forms second.
 */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_inc_u16,        iemAImpl_inc_u32,        iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
14006
14007
/**
 * @opmaps      grp5
 * @opcode      /1
 * @opflclass   incdec
 *
 * DEC Ev - word/dword/qword decrement.  The two body macros form one
 * statement pair: plain forms first, LOCKed memory forms second.
 */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_dec_u16,        iemAImpl_dec_u32,        iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
14019
14020
/**
 * Opcode 0xff /2 - CALL Ev, near indirect call.
 *
 * The target is read from a register or memory operand and handed to the
 * operand-size specific C implementation, which pushes the return address
 * and performs the branch.  In 64-bit mode the default operand size is
 * 64-bit (Intel additionally ignores a 0x66 prefix here).
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1, 0, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14107
/**
 * Body macro for the far indirect branches CALLF/JMPF Ep (0xff /3 and /5).
 *
 * Loads a sel:offset pair from memory (offset first, selector at
 * offset+2/4/8 depending on operand size) and defers to the given C
 * implementation.  Register operands are invalid (\#UD).  In 64-bit mode
 * the default operand size is 32-bit; only Intel honours REX.W here.
 *
 * @param   a_bRm           The ModR/M byte.
 * @param   a_fnCImpl       The C implementation (iemCImpl_callf / _FarJmp).
 * @param   a_fCImplExtra   Extra IEM_CIMPL_F_XXX flags for the call.
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl, a_fCImplExtra) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1, 0, 0); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint16_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_16BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); /* selector follows offset */ \
            IEM_MC_CALL_CIMPL_3(  IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
            \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint32_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_32BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); /* selector follows offset */ \
            IEM_MC_CALL_CIMPL_3(  IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
            \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint64_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_64BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); /* selector follows offset */ \
            IEM_MC_CALL_CIMPL_3(  IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                | IEM_CIMPL_F_MODE /* no gates */, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
            \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
14176
14177
/**
 * Opcode 0xff /3 - CALLF Ep, far indirect call via a sel:offset pair in
 * memory; register operands raise \#UD (handled by the body macro).
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf, IEM_CIMPL_F_BRANCH_STACK);
}
14187
14188
/**
 * Opcode 0xff /4 - JMP Ev, near indirect jump.
 *
 * Unlike CALL Ev this needs no C implementation: the new RIP is fetched
 * from a register or memory and installed directly.  In 64-bit mode the
 * default operand size is 64-bit (Intel additionally ignores a 0x66
 * prefix here).
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14275
14276
14277/**
14278 * Opcode 0xff /5.
14279 * @param bRm The RM byte.
14280 */
14281FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
14282{
14283 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
14284 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp, 0);
14285}
14286
14287
14288/**
14289 * Opcode 0xff /6.
14290 * @param bRm The RM byte.
14291 */
14292FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
14293{
14294 IEMOP_MNEMONIC(push_Ev, "push Ev");
14295
14296 /* Registers are handled by a common worker. */
14297 if (IEM_IS_MODRM_REG_MODE(bRm))
14298 return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
14299
14300 /* Memory we do here. */
14301 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
14302 switch (pVCpu->iem.s.enmEffOpSize)
14303 {
14304 case IEMMODE_16BIT:
14305 IEM_MC_BEGIN(0, 2, 0, 0);
14306 IEM_MC_LOCAL(uint16_t, u16Src);
14307 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14308 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14310 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14311 IEM_MC_PUSH_U16(u16Src);
14312 IEM_MC_ADVANCE_RIP_AND_FINISH();
14313 IEM_MC_END();
14314 break;
14315
14316 case IEMMODE_32BIT:
14317 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
14318 IEM_MC_LOCAL(uint32_t, u32Src);
14319 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14320 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14322 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14323 IEM_MC_PUSH_U32(u32Src);
14324 IEM_MC_ADVANCE_RIP_AND_FINISH();
14325 IEM_MC_END();
14326 break;
14327
14328 case IEMMODE_64BIT:
14329 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
14330 IEM_MC_LOCAL(uint64_t, u64Src);
14331 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14332 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14334 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14335 IEM_MC_PUSH_U64(u64Src);
14336 IEM_MC_ADVANCE_RIP_AND_FINISH();
14337 IEM_MC_END();
14338 break;
14339
14340 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14341 }
14342}
14343
14344
14345/**
14346 * @opcode 0xff
14347 */
14348FNIEMOP_DEF(iemOp_Grp5)
14349{
14350 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14351 switch (IEM_GET_MODRM_REG_8(bRm))
14352 {
14353 case 0:
14354 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
14355 case 1:
14356 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
14357 case 2:
14358 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
14359 case 3:
14360 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
14361 case 4:
14362 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
14363 case 5:
14364 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
14365 case 6:
14366 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
14367 case 7:
14368 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
14369 IEMOP_RAISE_INVALID_OPCODE_RET();
14370 }
14371 AssertFailedReturn(VERR_IEM_IPE_3);
14372}
14373
14374
14375
/**
 * The one byte opcode dispatch table, indexed directly by the opcode byte
 * (0x00 thru 0xff).  Forward declared (non-static) at the top of this file.
 * NOTE: entry order is the opcode encoding - do not reorder.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex3, iemOp_lds_Gv_Mp__vex2, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_in_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
14443
14444
14445/** @} */
14446
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette