VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 101954

Last change on this file since 101954 was 101954, checked in by vboxsync, 16 months ago

VMM/IEM: Eliminated IEM_MC_ASSIGN_U8_SX_U64 and IEM_MC_ASSIGN_U32_SX_U64. bugref:10371

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 490.1 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 101954 2023-11-08 02:31:04Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
59/**
 60 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
 61 * memory/register as the destination.
 *
 * Decodes the ModR/M byte: the register form emits a plain three-argument
 * AIMPL call, while the memory form maps the destination byte read/write and
 * commits EFLAGS only after the memory commit succeeded.
 *
 * NOTE(review): this macro is intentionally "unterminated" - it leaves an
 * else-branch (the LOCK-prefix path) and two braces open, which must be
 * closed by one of the companion macros named below.  Never use it alone.
 62 *
 63 * Used with IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED.
 64 */
65#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8) \
66 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
67 \
68 /* \
69 * If rm is denoting a register, no more instruction bytes. \
70 */ \
71 if (IEM_IS_MODRM_REG_MODE(bRm)) \
72 { \
73 IEM_MC_BEGIN(3, 0, 0, 0); \
74 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
75 IEM_MC_ARG(uint8_t, u8Src, 1); \
76 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
77 \
78 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
79 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
80 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
81 IEM_MC_REF_EFLAGS(pEFlags); \
82 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
83 \
84 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
85 IEM_MC_END(); \
86 } \
87 else \
88 { \
89 /* \
90 * We're accessing memory. \
91 * Note! We're putting the eflags on the stack here so we can commit them \
92 * after the memory. \
93 */ \
94 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
95 { \
96 IEM_MC_BEGIN(3, 3, 0, 0); \
97 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
98 IEM_MC_ARG(uint8_t, u8Src, 1); \
99 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
100 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
101 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
102 \
103 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
104 IEMOP_HLP_DONE_DECODING(); \
105 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
106 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
107 IEM_MC_FETCH_EFLAGS(EFlags); \
108 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
109 \
110 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
111 IEM_MC_COMMIT_EFLAGS(EFlags); \
112 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
113 IEM_MC_END(); \
114 } \
115 else \
116 { \
117 (void)0
118
119/**
 120 * Body for instructions like TEST & CMP, ++ with a byte memory/registers as
 121 * operands.
 *
 * Read-only sibling of IEMOP_BODY_BINARY_rm_r8_RW: the memory operand is
 * mapped read-only (IEM_MC_MEM_MAP_U8_RO) since these instructions only
 * update EFLAGS and never write the destination.  Like its sibling it is
 * intentionally left with an open else-branch and two open braces.
 122 *
 123 * Used with IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED.
 124 */
125#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8) \
126 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
127 \
128 /* \
129 * If rm is denoting a register, no more instruction bytes. \
130 */ \
131 if (IEM_IS_MODRM_REG_MODE(bRm)) \
132 { \
133 IEM_MC_BEGIN(3, 0, 0, 0); \
134 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
135 IEM_MC_ARG(uint8_t, u8Src, 1); \
136 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
137 \
138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
139 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
140 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
141 IEM_MC_REF_EFLAGS(pEFlags); \
142 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
143 \
144 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
145 IEM_MC_END(); \
146 } \
147 else \
148 { \
149 /* \
150 * We're accessing memory. \
151 * Note! We're putting the eflags on the stack here so we can commit them \
152 * after the memory. \
153 */ \
154 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
155 { \
156 IEM_MC_BEGIN(3, 3, 0, 0); \
157 IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
158 IEM_MC_ARG(uint8_t, u8Src, 1); \
159 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
160 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
161 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
162 \
163 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
164 IEMOP_HLP_DONE_DECODING(); \
165 IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
166 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
167 IEM_MC_FETCH_EFLAGS(EFlags); \
168 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
169 \
170 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
171 IEM_MC_COMMIT_EFLAGS(EFlags); \
172 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
173 IEM_MC_END(); \
174 } \
175 else \
176 { \
177 (void)0
178
/**
 * Tail for IEMOP_BODY_BINARY_rm_r8_RW / _RO when the instruction does not
 * permit a LOCK prefix: raises an invalid-lock-prefix exception and closes
 * the two braces the body macro left open.
 */
179#define IEMOP_BODY_BINARY_rm_r8_NO_LOCK() \
180 IEMOP_HLP_DONE_DECODING(); \
181 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
182 } \
183 } \
184 (void)0
185
/**
 * Tail for IEMOP_BODY_BINARY_rm_r8_RW when the instruction allows a LOCK
 * prefix: emits the locked-memory variant (a_fnLockedU8) for the r/m8
 * destination and closes the two braces the body macro left open.
 * Relies on the bRm local declared by the body macro.
 */
186#define IEMOP_BODY_BINARY_rm_r8_LOCKED(a_fnLockedU8) \
187 IEM_MC_BEGIN(3, 3, 0, 0); \
188 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
189 IEM_MC_ARG(uint8_t, u8Src, 1); \
190 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
192 IEM_MC_LOCAL(uint8_t, bMapInfoDst); \
193 \
194 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
195 IEMOP_HLP_DONE_DECODING(); \
196 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bMapInfoDst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
197 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
198 IEM_MC_FETCH_EFLAGS(EFlags); \
199 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
200 \
201 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bMapInfoDst); \
202 IEM_MC_COMMIT_EFLAGS(EFlags); \
203 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
204 IEM_MC_END(); \
205 } \
206 } \
207 (void)0
208
209/**
 210 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 211 * destination.
 *
 * Since the destination is always a register, no LOCK prefix is ever valid
 * and no memory mapping/commit dance is needed - the memory form simply
 * fetches the source byte.  Unlike the r/m-destination macros, this one is
 * self-contained (balanced braces) and needs no companion tail macro.
 212 */
213#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
214 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
215 \
216 /* \
217 * If rm is denoting a register, no more instruction bytes. \
218 */ \
219 if (IEM_IS_MODRM_REG_MODE(bRm)) \
220 { \
221 IEM_MC_BEGIN(3, 0, 0, 0); \
222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
223 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
224 IEM_MC_ARG(uint8_t, u8Src, 1); \
225 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
226 \
227 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
228 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
229 IEM_MC_REF_EFLAGS(pEFlags); \
230 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
231 \
232 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
233 IEM_MC_END(); \
234 } \
235 else \
236 { \
237 /* \
238 * We're accessing memory. \
239 */ \
240 IEM_MC_BEGIN(3, 1, 0, 0); \
241 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
242 IEM_MC_ARG(uint8_t, u8Src, 1); \
243 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
244 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
245 \
246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
248 IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
249 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
250 IEM_MC_REF_EFLAGS(pEFlags); \
251 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
252 \
253 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
254 IEM_MC_END(); \
255 } \
256 (void)0
257
258
259/**
 260 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 261 * memory/register as the destination.
 *
 * Switches on the effective operand size (16/32/64-bit).  The 32-bit
 * register form additionally clears the high half of the destination GPR
 * (IEM_MC_CLEAR_HIGH_GREG_U64), per the usual 64-bit mode semantics.
 *
 * NOTE(review): intentionally unterminated - ends inside the LOCK-prefix
 * else-branch and must be closed by IEMOP_BODY_BINARY_rm_rv_LOCKED.
 262 */
263#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
264 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
265 \
266 /* \
267 * If rm is denoting a register, no more instruction bytes. \
268 */ \
269 if (IEM_IS_MODRM_REG_MODE(bRm)) \
270 { \
271 switch (pVCpu->iem.s.enmEffOpSize) \
272 { \
273 case IEMMODE_16BIT: \
274 IEM_MC_BEGIN(3, 0, 0, 0); \
275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
276 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
277 IEM_MC_ARG(uint16_t, u16Src, 1); \
278 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
279 \
280 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
281 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
282 IEM_MC_REF_EFLAGS(pEFlags); \
283 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
284 \
285 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
286 IEM_MC_END(); \
287 break; \
288 \
289 case IEMMODE_32BIT: \
290 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
292 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
293 IEM_MC_ARG(uint32_t, u32Src, 1); \
294 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
295 \
296 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
297 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
298 IEM_MC_REF_EFLAGS(pEFlags); \
299 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
300 \
301 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
302 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
303 IEM_MC_END(); \
304 break; \
305 \
306 case IEMMODE_64BIT: \
307 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
309 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
310 IEM_MC_ARG(uint64_t, u64Src, 1); \
311 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
312 \
313 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
314 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
315 IEM_MC_REF_EFLAGS(pEFlags); \
316 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
317 \
318 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
319 IEM_MC_END(); \
320 break; \
321 \
322 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
323 } \
324 } \
325 else \
326 { \
327 /* \
328 * We're accessing memory. \
329 * Note! We're putting the eflags on the stack here so we can commit them \
330 * after the memory. \
331 */ \
332 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
333 { \
334 switch (pVCpu->iem.s.enmEffOpSize) \
335 { \
336 case IEMMODE_16BIT: \
337 IEM_MC_BEGIN(3, 3, 0, 0); \
338 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
339 IEM_MC_ARG(uint16_t, u16Src, 1); \
340 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
342 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
343 \
344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
345 IEMOP_HLP_DONE_DECODING(); \
346 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
347 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
348 IEM_MC_FETCH_EFLAGS(EFlags); \
349 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
350 \
351 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
352 IEM_MC_COMMIT_EFLAGS(EFlags); \
353 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
354 IEM_MC_END(); \
355 break; \
356 \
357 case IEMMODE_32BIT: \
358 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
359 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
360 IEM_MC_ARG(uint32_t, u32Src, 1); \
361 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
362 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
363 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
364 \
365 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
366 IEMOP_HLP_DONE_DECODING(); \
367 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
368 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
369 IEM_MC_FETCH_EFLAGS(EFlags); \
370 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
371 \
372 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
373 IEM_MC_COMMIT_EFLAGS(EFlags); \
374 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
375 IEM_MC_END(); \
376 break; \
377 \
378 case IEMMODE_64BIT: \
379 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
380 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
381 IEM_MC_ARG(uint64_t, u64Src, 1); \
382 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
383 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
384 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
385 \
386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
387 IEMOP_HLP_DONE_DECODING(); \
388 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
389 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
390 IEM_MC_FETCH_EFLAGS(EFlags); \
391 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
392 \
393 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
394 IEM_MC_COMMIT_EFLAGS(EFlags); \
395 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
396 IEM_MC_END(); \
397 break; \
398 \
399 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
400 } \
401 } \
402 else \
403 { \
404 (void)0
405/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * Tail for IEMOP_BODY_BINARY_rm_rv_RW when a LOCK prefix is present: emits
 * the locked AIMPL variant per operand size and closes the braces the body
 * macro left open.  Relies on the bRm local declared by the body macro.
 */
406#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
407 switch (pVCpu->iem.s.enmEffOpSize) \
408 { \
409 case IEMMODE_16BIT: \
410 IEM_MC_BEGIN(3, 3, 0, 0); \
411 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
412 IEM_MC_ARG(uint16_t, u16Src, 1); \
413 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
414 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
415 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
416 \
417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
418 IEMOP_HLP_DONE_DECODING(); \
419 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
420 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
421 IEM_MC_FETCH_EFLAGS(EFlags); \
422 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
423 \
424 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
425 IEM_MC_COMMIT_EFLAGS(EFlags); \
426 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
427 IEM_MC_END(); \
428 break; \
429 \
430 case IEMMODE_32BIT: \
431 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
432 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
433 IEM_MC_ARG(uint32_t, u32Src, 1); \
434 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
436 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
437 \
438 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
439 IEMOP_HLP_DONE_DECODING(); \
440 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
441 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
442 IEM_MC_FETCH_EFLAGS(EFlags); \
443 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
444 \
445 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
446 IEM_MC_COMMIT_EFLAGS(EFlags); \
447 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
448 IEM_MC_END(); \
449 break; \
450 \
451 case IEMMODE_64BIT: \
452 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
453 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
454 IEM_MC_ARG(uint64_t, u64Src, 1); \
455 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
456 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
457 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
458 \
459 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
460 IEMOP_HLP_DONE_DECODING(); \
461 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
462 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
463 IEM_MC_FETCH_EFLAGS(EFlags); \
464 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
465 \
466 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
467 IEM_MC_COMMIT_EFLAGS(EFlags); \
468 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
469 IEM_MC_END(); \
470 break; \
471 \
472 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
473 } \
474 } \
475 } \
476 (void)0
477
478/**
 479 * Body for read-only word/dword/qword instructions like TEST and CMP with
 480 * memory/register as the destination.
 *
 * Self-contained (balanced braces): the memory operand is mapped read-only
 * and a LOCK prefix always raises the invalid-lock-prefix exception in the
 * final else-branch, so no companion tail macro is needed.
 481 */
482#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
483 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
484 \
485 /* \
486 * If rm is denoting a register, no more instruction bytes. \
487 */ \
488 if (IEM_IS_MODRM_REG_MODE(bRm)) \
489 { \
490 switch (pVCpu->iem.s.enmEffOpSize) \
491 { \
492 case IEMMODE_16BIT: \
493 IEM_MC_BEGIN(3, 0, 0, 0); \
494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
495 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
496 IEM_MC_ARG(uint16_t, u16Src, 1); \
497 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
498 \
499 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
500 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
501 IEM_MC_REF_EFLAGS(pEFlags); \
502 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
503 \
504 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
505 IEM_MC_END(); \
506 break; \
507 \
508 case IEMMODE_32BIT: \
509 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
510 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
511 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
512 IEM_MC_ARG(uint32_t, u32Src, 1); \
513 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
514 \
515 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
516 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
517 IEM_MC_REF_EFLAGS(pEFlags); \
518 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
519 \
520 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
521 IEM_MC_END(); \
522 break; \
523 \
524 case IEMMODE_64BIT: \
525 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
527 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
528 IEM_MC_ARG(uint64_t, u64Src, 1); \
529 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
530 \
531 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
532 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
533 IEM_MC_REF_EFLAGS(pEFlags); \
534 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
535 \
536 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
537 IEM_MC_END(); \
538 break; \
539 \
540 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
541 } \
542 } \
543 else \
544 { \
545 /* \
546 * We're accessing memory. \
547 * Note! We're putting the eflags on the stack here so we can commit them \
548 * after the memory. \
549 */ \
550 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
551 { \
552 switch (pVCpu->iem.s.enmEffOpSize) \
553 { \
554 case IEMMODE_16BIT: \
555 IEM_MC_BEGIN(3, 3, 0, 0); \
556 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
557 IEM_MC_ARG(uint16_t, u16Src, 1); \
558 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
560 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
561 \
562 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
563 IEMOP_HLP_DONE_DECODING(); \
564 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
565 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
566 IEM_MC_FETCH_EFLAGS(EFlags); \
567 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
568 \
569 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
570 IEM_MC_COMMIT_EFLAGS(EFlags); \
571 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
572 IEM_MC_END(); \
573 break; \
574 \
575 case IEMMODE_32BIT: \
576 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
577 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
578 IEM_MC_ARG(uint32_t, u32Src, 1); \
579 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
580 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
581 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
582 \
583 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
584 IEMOP_HLP_DONE_DECODING(); \
585 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
586 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
587 IEM_MC_FETCH_EFLAGS(EFlags); \
588 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
589 \
590 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
591 IEM_MC_COMMIT_EFLAGS(EFlags); \
592 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
593 IEM_MC_END(); \
594 break; \
595 \
596 case IEMMODE_64BIT: \
597 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
598 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
599 IEM_MC_ARG(uint64_t, u64Src, 1); \
600 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
601 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
602 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
603 \
604 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
605 IEMOP_HLP_DONE_DECODING(); \
606 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
607 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
608 IEM_MC_FETCH_EFLAGS(EFlags); \
609 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
610 \
611 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
612 IEM_MC_COMMIT_EFLAGS(EFlags); \
613 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
614 IEM_MC_END(); \
615 break; \
616 \
617 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
618 } \
619 } \
620 else \
621 { \
622 IEMOP_HLP_DONE_DECODING(); \
623 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
624 } \
625 } \
626 (void)0
627
628
629/**
 630 * Body for instructions like ADD, AND, OR, ++ with working on AL with
 631 * a byte immediate.
 *
 * Note! Deliberately ends without a trailing semicolon on IEM_MC_END() -
 * the instruction decoder function invoking this macro supplies it.
 632 */
633#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
634 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
635 \
636 IEM_MC_BEGIN(3, 0, 0, 0); \
637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
638 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
639 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1); \
640 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
641 \
642 IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
643 IEM_MC_REF_EFLAGS(pEFlags); \
644 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
645 \
646 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
647 IEM_MC_END()
648
649/**
 650 * Body for instructions like ADD, AND, OR, ++ with working on
 651 * AX/EAX/RAX with a word/dword immediate.
 *
 * @param a_fModifiesDstReg  Non-zero when the instruction writes the
 *          destination (ADD/OR/...); selects the high-dword clearing of RAX
 *          after a 32-bit operation.  Pass 0 for TEST/CMP style users.
 *
 * In 64-bit mode the Iz immediate is sign-extended to 64 bits
 * (IEM_OPCODE_GET_NEXT_S32_SX_U64), matching the instruction encoding.
 * NOTE(review): the case blocks have no break; IEM_MC_ADVANCE_RIP_AND_FINISH
 * presumably returns from the function, so no fallthrough - TODO confirm.
 652 */
653#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
654 switch (pVCpu->iem.s.enmEffOpSize) \
655 { \
656 case IEMMODE_16BIT: \
657 { \
658 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
659 \
660 IEM_MC_BEGIN(3, 0, 0, 0); \
661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
662 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
663 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1); \
664 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
665 \
666 IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
667 IEM_MC_REF_EFLAGS(pEFlags); \
668 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
669 \
670 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
671 IEM_MC_END(); \
672 } \
673 \
674 case IEMMODE_32BIT: \
675 { \
676 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
677 \
678 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
680 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
681 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1); \
682 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
683 \
684 IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
685 IEM_MC_REF_EFLAGS(pEFlags); \
686 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
687 \
688 if (a_fModifiesDstReg) \
689 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
690 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
691 IEM_MC_END(); \
692 } \
693 \
694 case IEMMODE_64BIT: \
695 { \
696 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
697 \
698 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
700 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
701 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1); \
702 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
703 \
704 IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
705 IEM_MC_REF_EFLAGS(pEFlags); \
706 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
707 \
708 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
709 IEM_MC_END(); \
710 } \
711 \
712 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
713 } \
714 (void)0
715
716
717
718/* Instruction specification format - work in progress: */
719
720/**
 721 * @opcode 0x00
 722 * @opmnemonic add
 723 * @op1 rm:Eb
 724 * @op2 reg:Gb
 725 * @opmaps one
 726 * @openc ModR/M
 727 * @opflmodify cf,pf,af,zf,sf,of
 728 * @ophints harmless ignores_op_sizes
 729 * @opstats add_Eb_Gb
 730 * @opgroup og_gen_arith_bin
 731 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 732 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
 733 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
 734 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
 735 */
736FNIEMOP_DEF(iemOp_add_Eb_Gb)
737{
 /* ADD r/m8, r8 - RW body plus the LOCK-prefix tail for the locked variant. */
738 IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
739 IEMOP_BODY_BINARY_rm_r8_RW( iemAImpl_add_u8);
740 IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_add_u8_locked);
741}
742
743
744/**
 745 * @opcode 0x01
 746 * @opgroup og_gen_arith_bin
 747 * @opflmodify cf,pf,af,zf,sf,of
 748 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 749 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
 750 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
 751 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 752 */
753FNIEMOP_DEF(iemOp_add_Ev_Gv)
754{
 /* ADD r/m16/32/64, r16/32/64 - RW body plus the LOCK-prefix tail. */
755 IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
756 IEMOP_BODY_BINARY_rm_rv_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
757 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
758}
759
760
761/**
 762 * @opcode 0x02
 763 * @opgroup og_gen_arith_bin
 764 * @opflmodify cf,pf,af,zf,sf,of
 765 * @opcopytests iemOp_add_Eb_Gb
 766 */
767FNIEMOP_DEF(iemOp_add_Gb_Eb)
768{
 /* ADD r8, r/m8 - register destination, so no LOCK variant needed. */
769 IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
770 IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
771}
772
773
774/**
 775 * @opcode 0x03
 776 * @opgroup og_gen_arith_bin
 777 * @opflmodify cf,pf,af,zf,sf,of
 778 * @opcopytests iemOp_add_Ev_Gv
 779 */
780FNIEMOP_DEF(iemOp_add_Gv_Ev)
781{
 /* ADD r16/32/64, r/m16/32/64 - register destination, no LOCK variant. */
782 IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
783 IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1, 0);
784}
785
786
787/**
 788 * @opcode 0x04
 789 * @opgroup og_gen_arith_bin
 790 * @opflmodify cf,pf,af,zf,sf,of
 791 * @opcopytests iemOp_add_Eb_Gb
 792 */
793FNIEMOP_DEF(iemOp_add_Al_Ib)
794{
 /* ADD AL, imm8. */
795 IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
796 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
797}
798
799
800/**
 801 * @opcode 0x05
 802 * @opgroup og_gen_arith_bin
 803 * @opflmodify cf,pf,af,zf,sf,of
 804 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
 805 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
 806 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
 807 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 808 */
809FNIEMOP_DEF(iemOp_add_eAX_Iz)
810{
 /* ADD rAX, imm16/32 (sign-extended in 64-bit mode); modifies dst, so pass 1. */
811 IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
812 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
813}
814
815
816/**
 817 * @opcode 0x06
 818 * @opgroup og_stack_sreg
 819 */
820FNIEMOP_DEF(iemOp_push_ES)
821{
 /* PUSH ES - invalid in 64-bit mode; shared segment-register push helper. */
822 IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
823 IEMOP_HLP_NO_64BIT();
824 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
825}
826
827
828/**
 829 * @opcode 0x07
 830 * @opgroup og_stack_sreg
 831 */
832FNIEMOP_DEF(iemOp_pop_ES)
833{
 /* POP ES - invalid in 64-bit mode; defers to the C implementation since
    loading a segment register can change addressing mode state. */
834 IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
835 IEMOP_HLP_NO_64BIT();
836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
837 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
838}
839
840
841/**
 842 * @opcode 0x08
 843 * @opgroup og_gen_arith_bin
 844 * @opflmodify cf,pf,af,zf,sf,of
 845 * @opflundef af
 846 * @opflclear of,cf
 847 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 848 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 849 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 850 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 851 */
852FNIEMOP_DEF(iemOp_or_Eb_Gb)
853{
 /* OR r/m8, r8 - AF is architecturally undefined after OR. */
854 IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
855 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
856 IEMOP_BODY_BINARY_rm_r8_RW( iemAImpl_or_u8);
857 IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_or_u8_locked);
858}
859
860
861/**
 862 * @opcode 0x09
 863 * @opgroup og_gen_arith_bin
 864 * @opflmodify cf,pf,af,zf,sf,of
 865 * @opflundef af
 866 * @opflclear of,cf
 867 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 868 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 869 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 870 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 871 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 872 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 */
874FNIEMOP_DEF(iemOp_or_Ev_Gv)
875{
 /* OR r/m16/32/64, r16/32/64 - AF undefined after OR. */
876 IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
877 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
878 IEMOP_BODY_BINARY_rm_rv_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
879 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
880}
881
882
883/**
 884 * @opcode 0x0a
 885 * @opgroup og_gen_arith_bin
 886 * @opflmodify cf,pf,af,zf,sf,of
 887 * @opflundef af
 888 * @opflclear of,cf
 889 * @opcopytests iemOp_or_Eb_Gb
 890 */
891FNIEMOP_DEF(iemOp_or_Gb_Eb)
892{
 /* OR r8, r/m8 - register destination. */
893 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
894 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
895 IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
896}
897
898
899/**
 900 * @opcode 0x0b
 901 * @opgroup og_gen_arith_bin
 902 * @opflmodify cf,pf,af,zf,sf,of
 903 * @opflundef af
 904 * @opflclear of,cf
 905 * @opcopytests iemOp_or_Ev_Gv
 906 */
907FNIEMOP_DEF(iemOp_or_Gv_Ev)
908{
 /* OR r16/32/64, r/m16/32/64 - register destination. */
909 IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
910 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
911 IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1, 0);
912}
913
914
915/**
 916 * @opcode 0x0c
 917 * @opgroup og_gen_arith_bin
 918 * @opflmodify cf,pf,af,zf,sf,of
 919 * @opflundef af
 920 * @opflclear of,cf
 921 * @opcopytests iemOp_or_Eb_Gb
 922 */
923FNIEMOP_DEF(iemOp_or_Al_Ib)
924{
 /* OR AL, imm8. */
925 IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
926 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
927 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
928}
929
930
931/**
 932 * @opcode 0x0d
 933 * @opgroup og_gen_arith_bin
 934 * @opflmodify cf,pf,af,zf,sf,of
 935 * @opflundef af
 936 * @opflclear of,cf
 937 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 938 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 939 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 940 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 941 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 942 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 943 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
 944 */
945FNIEMOP_DEF(iemOp_or_eAX_Iz)
946{
 /* OR rAX, imm16/32 (sign-extended in 64-bit mode); modifies dst, so pass 1. */
947 IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
948 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
949 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
950}
951
952
953/**
954 * @opcode 0x0e
955 * @opgroup og_stack_sreg
956 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS: invalid in 64-bit mode (raises #UD via IEMOP_HLP_NO_64BIT). */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    /* Defer to the common segment-register push worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
963
964
965/**
966 * @opcode 0x0f
967 * @opmnemonic EscTwo0f
968 * @openc two0f
969 * @opdisenum OP_2B_ESC
970 * @ophints harmless
971 * @opgroup og_escapes
972 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        /* 286+: fetch the second opcode byte and dispatch via the two-byte
           table; the table has 4 entries per opcode, indexed by the mandatory
           prefix (idxPrefix). */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
1008
1009/**
1010 * @opcode 0x10
1011 * @opgroup og_gen_arith_bin
1012 * @opfltest cf
1013 * @opflmodify cf,pf,af,zf,sf,of
1014 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1015 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1016 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1017 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1018 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1019 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC Eb,Gb: r/m8 += r8 + CF; LOCK is allowed for the memory-destination form. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_adc_u8_locked);
}
1026
1027
1028/**
1029 * @opcode 0x11
1030 * @opgroup og_gen_arith_bin
1031 * @opfltest cf
1032 * @opflmodify cf,pf,af,zf,sf,of
1033 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1034 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1035 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1036 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1037 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1038 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC Ev,Gv: r/m16/32/64 += reg + CF; LOCK allowed for the memory form. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1045
1046
1047/**
1048 * @opcode 0x12
1049 * @opgroup og_gen_arith_bin
1050 * @opfltest cf
1051 * @opflmodify cf,pf,af,zf,sf,of
1052 * @opcopytests iemOp_adc_Eb_Gb
1053 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC Gb,Eb: r8 += r/m8 + CF (register destination; no LOCK form). */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
1059
1060
1061/**
1062 * @opcode 0x13
1063 * @opgroup og_gen_arith_bin
1064 * @opfltest cf
1065 * @opflmodify cf,pf,af,zf,sf,of
1066 * @opcopytests iemOp_adc_Ev_Gv
1067 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC Gv,Ev: reg += r/m + CF (register destination; no LOCK form). */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1, 0);
}
1073
1074
1075/**
1076 * @opcode 0x14
1077 * @opgroup og_gen_arith_bin
1078 * @opfltest cf
1079 * @opflmodify cf,pf,af,zf,sf,of
1080 * @opcopytests iemOp_adc_Eb_Gb
1081 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL,Ib: AL += imm8 + CF; fixed 8-bit form, op-size prefixes irrelevant. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
1087
1088
1089/**
1090 * @opcode 0x15
1091 * @opgroup og_gen_arith_bin
1092 * @opfltest cf
1093 * @opflmodify cf,pf,af,zf,sf,of
1094 * @opcopytests iemOp_adc_Ev_Gv
1095 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX,Iz: accumulator += immediate + CF; width follows effective operand size. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1101
1102
1103/**
1104 * @opcode 0x16
1105 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS: invalid in 64-bit mode (raises #UD via IEMOP_HLP_NO_64BIT). */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    /* Defer to the common segment-register push worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1112
1113
/**
 * @opcode 0x17
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS: loads SS from the stack.  Shadows interrupts/traps on the next
       instruction (DISOPTYPE_INHIBIT_IRQS / IEM_CIMPL_F_INHIBIT_SHADOW), and
       may change the mode context (IEM_CIMPL_F_MODE).  Invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
                                iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1128
1129
1130/**
1131 * @opcode 0x18
1132 * @opgroup og_gen_arith_bin
1133 * @opfltest cf
1134 * @opflmodify cf,pf,af,zf,sf,of
1135 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB Eb,Gb: r/m8 -= r8 + CF; LOCK allowed for the memory-destination form. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sbb_u8_locked);
}
1142
1143
1144/**
1145 * @opcode 0x19
1146 * @opgroup og_gen_arith_bin
1147 * @opfltest cf
1148 * @opflmodify cf,pf,af,zf,sf,of
1149 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB Ev,Gv: r/m16/32/64 -= reg + CF; LOCK allowed for the memory form. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1156
1157
1158/**
1159 * @opcode 0x1a
1160 * @opgroup og_gen_arith_bin
1161 * @opfltest cf
1162 * @opflmodify cf,pf,af,zf,sf,of
1163 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB Gb,Eb: r8 -= r/m8 + CF (register destination; no LOCK form). */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
1169
1170
1171/**
1172 * @opcode 0x1b
1173 * @opgroup og_gen_arith_bin
1174 * @opfltest cf
1175 * @opflmodify cf,pf,af,zf,sf,of
1176 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB Gv,Ev: reg -= r/m + CF (register destination; no LOCK form). */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1, 0);
}
1182
1183
1184/**
1185 * @opcode 0x1c
1186 * @opgroup og_gen_arith_bin
1187 * @opfltest cf
1188 * @opflmodify cf,pf,af,zf,sf,of
1189 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL,Ib: AL -= imm8 + CF; fixed 8-bit form, op-size prefixes irrelevant. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
1195
1196
1197/**
1198 * @opcode 0x1d
1199 * @opgroup og_gen_arith_bin
1200 * @opfltest cf
1201 * @opflmodify cf,pf,af,zf,sf,of
1202 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX,Iz: accumulator -= immediate + CF; width follows effective operand size. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1208
1209
1210/**
1211 * @opcode 0x1e
1212 * @opgroup og_stack_sreg
1213 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS: invalid in 64-bit mode (raises #UD via IEMOP_HLP_NO_64BIT). */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    /* Defer to the common segment-register push worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1220
1221
1222/**
1223 * @opcode 0x1f
1224 * @opgroup og_stack_sreg
1225 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS: loads DS from the stack; may change mode context (IEM_CIMPL_F_MODE).
       Invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1233
1234
1235/**
1236 * @opcode 0x20
1237 * @opgroup og_gen_arith_bin
1238 * @opflmodify cf,pf,af,zf,sf,of
1239 * @opflundef af
1240 * @opflclear of,cf
1241 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND Eb,Gb: r/m8 &= r8; LOCK allowed for the memory-destination form. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* AF is undefined after AND (see @opflundef), so exclude it from verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_and_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_and_u8_locked);
}
1249
1250
1251/**
1252 * @opcode 0x21
1253 * @opgroup og_gen_arith_bin
1254 * @opflmodify cf,pf,af,zf,sf,of
1255 * @opflundef af
1256 * @opflclear of,cf
1257 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND Ev,Gv: r/m16/32/64 &= reg; LOCK allowed for the memory form. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* AF is undefined after AND (see @opflundef), so exclude it from verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1265
1266
1267/**
1268 * @opcode 0x22
1269 * @opgroup og_gen_arith_bin
1270 * @opflmodify cf,pf,af,zf,sf,of
1271 * @opflundef af
1272 * @opflclear of,cf
1273 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND Gb,Eb: r8 &= r/m8 (register destination; no LOCK form). */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* AF is undefined after AND (see @opflundef), so exclude it from verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1280
1281
1282/**
1283 * @opcode 0x23
1284 * @opgroup og_gen_arith_bin
1285 * @opflmodify cf,pf,af,zf,sf,of
1286 * @opflundef af
1287 * @opflclear of,cf
1288 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND Gv,Ev: reg &= r/m (register destination; no LOCK form). */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* AF is undefined after AND (see @opflundef), so exclude it from verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1, 0);
}
1295
1296
1297/**
1298 * @opcode 0x24
1299 * @opgroup og_gen_arith_bin
1300 * @opflmodify cf,pf,af,zf,sf,of
1301 * @opflundef af
1302 * @opflclear of,cf
1303 */
1304FNIEMOP_DEF(iemOp_and_Al_Ib)
1305{
1306 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1307 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1308 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1309}
1310
1311
1312/**
1313 * @opcode 0x25
1314 * @opgroup og_gen_arith_bin
1315 * @opflmodify cf,pf,af,zf,sf,of
1316 * @opflundef af
1317 * @opflclear of,cf
1318 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX,Iz: accumulator &= immediate; width follows effective operand size. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* AF is undefined after AND (see @opflundef), so exclude it from verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1325
1326
1327/**
1328 * @opcode 0x26
1329 * @opmnemonic SEG
1330 * @op1 ES
1331 * @opgroup og_prefix
1332 * @openc prefix
1333 * @opdisenum OP_SEG
1334 * @ophints harmless
1335 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record the prefix and effective segment,
       then fetch and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg    = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1345
1346
1347/**
1348 * @opcode 0x27
1349 * @opfltest af,cf
1350 * @opflmodify cf,pf,af,zf,sf,of
1351 * @opflundef of
1352 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA: decimal-adjust AL after addition; invalid in 64-bit mode. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* OF is undefined after DAA (see @opflundef), so exclude it from verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_daa);
}
1361
1362
1363/**
1364 * @opcode 0x28
1365 * @opgroup og_gen_arith_bin
1366 * @opflmodify cf,pf,af,zf,sf,of
1367 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB Eb,Gb: r/m8 -= r8; LOCK allowed for the memory-destination form. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sub_u8_locked);
}
1374
1375
1376/**
1377 * @opcode 0x29
1378 * @opgroup og_gen_arith_bin
1379 * @opflmodify cf,pf,af,zf,sf,of
1380 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB Ev,Gv: r/m16/32/64 -= reg; LOCK allowed for the memory form. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1387
1388
1389/**
1390 * @opcode 0x2a
1391 * @opgroup og_gen_arith_bin
1392 * @opflmodify cf,pf,af,zf,sf,of
1393 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB Gb,Eb: r8 -= r/m8 (register destination; no LOCK form). */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1399
1400
1401/**
1402 * @opcode 0x2b
1403 * @opgroup og_gen_arith_bin
1404 * @opflmodify cf,pf,af,zf,sf,of
1405 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB Gv,Ev: reg -= r/m (register destination; no LOCK form). */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1, 0);
}
1411
1412
1413/**
1414 * @opcode 0x2c
1415 * @opgroup og_gen_arith_bin
1416 * @opflmodify cf,pf,af,zf,sf,of
1417 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL,Ib: AL -= imm8; fixed 8-bit form, op-size prefixes irrelevant. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1423
1424
1425/**
1426 * @opcode 0x2d
1427 * @opgroup og_gen_arith_bin
1428 * @opflmodify cf,pf,af,zf,sf,of
1429 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX,Iz: accumulator -= immediate; width follows effective operand size. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1435
1436
1437/**
1438 * @opcode 0x2e
1439 * @opmnemonic SEG
1440 * @op1 CS
1441 * @opgroup og_prefix
1442 * @openc prefix
1443 * @opdisenum OP_SEG
1444 * @ophints harmless
1445 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record the prefix and effective segment,
       then fetch and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1455
1456
1457/**
1458 * @opcode 0x2f
1459 * @opfltest af,cf
1460 * @opflmodify cf,pf,af,zf,sf,of
1461 * @opflundef of
1462 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS: decimal-adjust AL after subtraction; invalid in 64-bit mode. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* OF is undefined after DAS (see @opflundef), so exclude it from verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_das);
}
1471
1472
1473/**
1474 * @opcode 0x30
1475 * @opgroup og_gen_arith_bin
1476 * @opflmodify cf,pf,af,zf,sf,of
1477 * @opflundef af
1478 * @opflclear of,cf
1479 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR Eb,Gb: r/m8 ^= r8; LOCK allowed for the memory-destination form. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* AF is undefined after XOR (see @opflundef), so exclude it from verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_xor_u8_locked);
}
1487
1488
1489/**
1490 * @opcode 0x31
1491 * @opgroup og_gen_arith_bin
1492 * @opflmodify cf,pf,af,zf,sf,of
1493 * @opflundef af
1494 * @opflclear of,cf
1495 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR Ev,Gv: r/m16/32/64 ^= reg; LOCK allowed for the memory form. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* AF is undefined after XOR (see @opflundef), so exclude it from verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1503
1504
1505/**
1506 * @opcode 0x32
1507 * @opgroup og_gen_arith_bin
1508 * @opflmodify cf,pf,af,zf,sf,of
1509 * @opflundef af
1510 * @opflclear of,cf
1511 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR Gb,Eb: r8 ^= r/m8 (register destination; no LOCK form). */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* AF is undefined after XOR (see @opflundef), so exclude it from verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1518
1519
1520/**
1521 * @opcode 0x33
1522 * @opgroup og_gen_arith_bin
1523 * @opflmodify cf,pf,af,zf,sf,of
1524 * @opflundef af
1525 * @opflclear of,cf
1526 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR Gv,Ev: reg ^= r/m (register destination; no LOCK form). */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* AF is undefined after XOR (see @opflundef), so exclude it from verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1, 0);
}
1533
1534
1535/**
1536 * @opcode 0x34
1537 * @opgroup og_gen_arith_bin
1538 * @opflmodify cf,pf,af,zf,sf,of
1539 * @opflundef af
1540 * @opflclear of,cf
1541 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL,Ib: AL ^= imm8; fixed 8-bit form, op-size prefixes irrelevant. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* AF is undefined after XOR (see @opflundef), so exclude it from verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1548
1549
1550/**
1551 * @opcode 0x35
1552 * @opgroup og_gen_arith_bin
1553 * @opflmodify cf,pf,af,zf,sf,of
1554 * @opflundef af
1555 * @opflclear of,cf
1556 */
1557FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1558{
1559 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1560 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1561 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1562}
1563
1564
1565/**
1566 * @opcode 0x36
1567 * @opmnemonic SEG
1568 * @op1 SS
1569 * @opgroup og_prefix
1570 * @openc prefix
1571 * @opdisenum OP_SEG
1572 * @ophints harmless
1573 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record the prefix and effective segment,
       then fetch and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1583
1584
1585/**
1586 * @opcode 0x37
1587 * @opfltest af,cf
1588 * @opflmodify cf,pf,af,zf,sf,of
1589 * @opflundef pf,zf,sf,of
1590 * @opgroup og_gen_arith_dec
1591 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1592 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1593 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1594 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1595 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1596 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1597 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1598 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1599 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1600 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1601 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1602 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1603 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1604 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1605 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1606 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1607 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1608 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1609 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1610 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1611 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1612 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1613 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1614 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1615 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1616 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1617 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1618 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1619 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1620 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1621 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1622 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA: ASCII-adjust AL after addition; invalid in 64-bit mode. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* OF is undefined after AAA (see @opflundef), so exclude it from verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aaa);
}
1632
1633
1634/**
1635 * @opcode 0x38
1636 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP Eb,Gb: compare r/m8 with r8 - flags only, destination is read-only
       (_RO body) and the LOCK prefix is rejected (_NO_LOCK). */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
1643
1644
1645/**
1646 * @opcode 0x39
1647 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP Ev,Gv: compare r/m16/32/64 with reg - flags only (_RO body). */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
1653
1654
1655/**
1656 * @opcode 0x3a
1657 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP Gb,Eb: compare r8 with r/m8 - flags only. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1663
1664
1665/**
1666 * @opcode 0x3b
1667 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP Gv,Ev: compare reg with r/m - flags only; note the 0 "modifies
       register" argument, unlike the arithmetic rv_rm users which pass 1. */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0, 0);
}
1673
1674
1675/**
1676 * @opcode 0x3c
1677 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL,Ib: compare AL with imm8 - flags only. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1683
1684
1685/**
1686 * @opcode 0x3d
1687 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX,Iz: compare accumulator with immediate - flags only (0 = no
       register modification, unlike the arithmetic rAX_Iz users passing 1). */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1693
1694
1695/**
1696 * @opcode 0x3e
1697 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record the prefix and effective segment,
       then fetch and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1707
1708
1709/**
1710 * @opcode 0x3f
1711 * @opfltest af,cf
1712 * @opflmodify cf,pf,af,zf,sf,of
1713 * @opflundef pf,zf,sf,of
1714 * @opgroup og_gen_arith_dec
1715 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1716 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1717 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1718 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1719 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1720 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1721 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1722 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1723 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1724 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1725 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1726 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1727 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1728 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1729 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1730 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1731 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1732 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1733 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1734 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1735 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1736 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1737 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1738 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1739 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1740 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1741 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1742 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1743 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1744 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1745 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1746 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1747 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1748 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1749 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1750 */
1751FNIEMOP_DEF(iemOp_aas)
1752{
1753 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1754 IEMOP_HLP_NO_64BIT();
1755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1756 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1757
1758 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aas);
1759}
1760
1761
/**
 * Common 'inc/dec register' helper.
 *
 * Not for 64-bit code, only for what became the rex prefixes.
 *
 * Emits the 16-bit and 32-bit effective-operand-size variants of a one-byte
 * inc/dec on general register @a a_iReg; the 32-bit case clears the high half
 * of the 64-bit register (IEM_MC_CLEAR_HIGH_GREG_U64).  There is deliberately
 * no IEMMODE_64BIT case since 0x40-0x4f are REX prefixes in 64-bit mode.
 */
#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *,  pu16Dst, 0); \
            IEM_MC_ARG(uint32_t *,  pEFlags, 1); \
            IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0); \
            IEM_MC_ARG(uint32_t *,  pEFlags, 1); \
            IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
            IEM_MC_CLEAR_HIGH_GREG_U64(a_iReg); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
1797
1798/**
1799 * @opcode 0x40
1800 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        /* Fetch and dispatch the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit code: plain INC eAX. */
    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1818
1819
1820/**
1821 * @opcode 0x41
1822 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB     = 1 << 3;   /* REX.B extends the r/m/base register field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit code: plain INC eCX. */
    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1841
1842
1843/**
1844 * @opcode 0x42
1845 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;   /* REX.X extends the SIB index register field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit code: plain INC eDX. */
    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1864
1865
1866
1867/**
1868 * @opcode 0x43
1869 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;   /* REX.B: base/r-m extension. */
        pVCpu->iem.s.uRexIndex = 1 << 3;   /* REX.X: SIB index extension. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit code: plain INC eBX. */
    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1889
1890
1891/**
1892 * @opcode 0x44
1893 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg   = 1 << 3;   /* REX.R extends the ModR/M reg field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit code: plain INC eSP. */
    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1912
1913
1914/**
1915 * @opcode 0x45
1916 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg   = 1 << 3;   /* REX.R: ModR/M reg extension. */
        pVCpu->iem.s.uRexB     = 1 << 3;   /* REX.B: base/r-m extension. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit code: plain INC eBP. */
    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1936
1937
1938/**
1939 * @opcode 0x46
1940 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;   /* REX.R: ModR/M reg extension. */
        pVCpu->iem.s.uRexIndex = 1 << 3;   /* REX.X: SIB index extension. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit code: plain INC eSI. */
    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
1960
1961
1962/**
1963 * @opcode 0x47
1964 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;   /* REX.R: ModR/M reg extension. */
        pVCpu->iem.s.uRexB     = 1 << 3;   /* REX.B: base/r-m extension. */
        pVCpu->iem.s.uRexIndex = 1 << 3;   /* REX.X: SIB index extension. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit code: plain INC eDI. */
    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
1985
1986
1987/**
1988 * @opcode 0x48
1989 */
1990FNIEMOP_DEF(iemOp_dec_eAX)
1991{
1992 /*
1993 * This is a REX prefix in 64-bit mode.
1994 */
1995 if (IEM_IS_64BIT_CODE(pVCpu))
1996 {
1997 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
1998 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
1999 iemRecalEffOpSize(pVCpu);
2000
2001 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2002 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2003 }
2004
2005 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
2006 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
2007}
2008
2009
2010/**
2011 * @opcode 0x49
2012 */
2013FNIEMOP_DEF(iemOp_dec_eCX)
2014{
2015 /*
2016 * This is a REX prefix in 64-bit mode.
2017 */
2018 if (IEM_IS_64BIT_CODE(pVCpu))
2019 {
2020 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
2021 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2022 pVCpu->iem.s.uRexB = 1 << 3;
2023 iemRecalEffOpSize(pVCpu);
2024
2025 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2026 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2027 }
2028
2029 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
2030 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
2031}
2032
2033
2034/**
2035 * @opcode 0x4a
2036 */
2037FNIEMOP_DEF(iemOp_dec_eDX)
2038{
2039 /*
2040 * This is a REX prefix in 64-bit mode.
2041 */
2042 if (IEM_IS_64BIT_CODE(pVCpu))
2043 {
2044 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
2045 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2046 pVCpu->iem.s.uRexIndex = 1 << 3;
2047 iemRecalEffOpSize(pVCpu);
2048
2049 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2050 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2051 }
2052
2053 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
2054 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
2055}
2056
2057
2058/**
2059 * @opcode 0x4b
2060 */
2061FNIEMOP_DEF(iemOp_dec_eBX)
2062{
2063 /*
2064 * This is a REX prefix in 64-bit mode.
2065 */
2066 if (IEM_IS_64BIT_CODE(pVCpu))
2067 {
2068 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
2069 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2070 pVCpu->iem.s.uRexB = 1 << 3;
2071 pVCpu->iem.s.uRexIndex = 1 << 3;
2072 iemRecalEffOpSize(pVCpu);
2073
2074 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2075 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2076 }
2077
2078 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
2079 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
2080}
2081
2082
2083/**
2084 * @opcode 0x4c
2085 */
2086FNIEMOP_DEF(iemOp_dec_eSP)
2087{
2088 /*
2089 * This is a REX prefix in 64-bit mode.
2090 */
2091 if (IEM_IS_64BIT_CODE(pVCpu))
2092 {
2093 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
2094 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
2095 pVCpu->iem.s.uRexReg = 1 << 3;
2096 iemRecalEffOpSize(pVCpu);
2097
2098 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2099 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2100 }
2101
2102 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
2103 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
2104}
2105
2106
2107/**
2108 * @opcode 0x4d
2109 */
2110FNIEMOP_DEF(iemOp_dec_eBP)
2111{
2112 /*
2113 * This is a REX prefix in 64-bit mode.
2114 */
2115 if (IEM_IS_64BIT_CODE(pVCpu))
2116 {
2117 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
2118 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2119 pVCpu->iem.s.uRexReg = 1 << 3;
2120 pVCpu->iem.s.uRexB = 1 << 3;
2121 iemRecalEffOpSize(pVCpu);
2122
2123 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2124 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2125 }
2126
2127 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
2128 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
2129}
2130
2131
2132/**
2133 * @opcode 0x4e
2134 */
2135FNIEMOP_DEF(iemOp_dec_eSI)
2136{
2137 /*
2138 * This is a REX prefix in 64-bit mode.
2139 */
2140 if (IEM_IS_64BIT_CODE(pVCpu))
2141 {
2142 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
2143 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2144 pVCpu->iem.s.uRexReg = 1 << 3;
2145 pVCpu->iem.s.uRexIndex = 1 << 3;
2146 iemRecalEffOpSize(pVCpu);
2147
2148 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2149 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2150 }
2151
2152 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
2153 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
2154}
2155
2156
2157/**
2158 * @opcode 0x4f
2159 */
2160FNIEMOP_DEF(iemOp_dec_eDI)
2161{
2162 /*
2163 * This is a REX prefix in 64-bit mode.
2164 */
2165 if (IEM_IS_64BIT_CODE(pVCpu))
2166 {
2167 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
2168 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2169 pVCpu->iem.s.uRexReg = 1 << 3;
2170 pVCpu->iem.s.uRexB = 1 << 3;
2171 pVCpu->iem.s.uRexIndex = 1 << 3;
2172 iemRecalEffOpSize(pVCpu);
2173
2174 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2175 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2176 }
2177
2178 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
2179 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
2180}
2181
2182
2183/**
2184 * Common 'push register' helper.
2185 */
2186FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
2187{
2188 if (IEM_IS_64BIT_CODE(pVCpu))
2189 {
2190 iReg |= pVCpu->iem.s.uRexB;
2191 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2192 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2193 }
2194
2195 switch (pVCpu->iem.s.enmEffOpSize)
2196 {
2197 case IEMMODE_16BIT:
2198 IEM_MC_BEGIN(0, 1, 0, 0);
2199 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2200 IEM_MC_LOCAL(uint16_t, u16Value);
2201 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
2202 IEM_MC_PUSH_U16(u16Value);
2203 IEM_MC_ADVANCE_RIP_AND_FINISH();
2204 IEM_MC_END();
2205 break;
2206
2207 case IEMMODE_32BIT:
2208 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2210 IEM_MC_LOCAL(uint32_t, u32Value);
2211 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
2212 IEM_MC_PUSH_U32(u32Value);
2213 IEM_MC_ADVANCE_RIP_AND_FINISH();
2214 IEM_MC_END();
2215 break;
2216
2217 case IEMMODE_64BIT:
2218 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2220 IEM_MC_LOCAL(uint64_t, u64Value);
2221 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
2222 IEM_MC_PUSH_U64(u64Value);
2223 IEM_MC_ADVANCE_RIP_AND_FINISH();
2224 IEM_MC_END();
2225 break;
2226
2227 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2228 }
2229}
2230
2231
2232/**
2233 * @opcode 0x50
2234 */
2235FNIEMOP_DEF(iemOp_push_eAX)
2236{
2237 IEMOP_MNEMONIC(push_rAX, "push rAX");
2238 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
2239}
2240
2241
2242/**
2243 * @opcode 0x51
2244 */
2245FNIEMOP_DEF(iemOp_push_eCX)
2246{
2247 IEMOP_MNEMONIC(push_rCX, "push rCX");
2248 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
2249}
2250
2251
2252/**
2253 * @opcode 0x52
2254 */
2255FNIEMOP_DEF(iemOp_push_eDX)
2256{
2257 IEMOP_MNEMONIC(push_rDX, "push rDX");
2258 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
2259}
2260
2261
2262/**
2263 * @opcode 0x53
2264 */
2265FNIEMOP_DEF(iemOp_push_eBX)
2266{
2267 IEMOP_MNEMONIC(push_rBX, "push rBX");
2268 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
2269}
2270
2271
2272/**
2273 * @opcode 0x54
2274 */
2275FNIEMOP_DEF(iemOp_push_eSP)
2276{
2277 IEMOP_MNEMONIC(push_rSP, "push rSP");
2278 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
2279 {
2280 IEM_MC_BEGIN(0, 1, IEM_MC_F_ONLY_8086, 0);
2281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2282 IEM_MC_LOCAL(uint16_t, u16Value);
2283 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
2284 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
2285 IEM_MC_PUSH_U16(u16Value);
2286 IEM_MC_ADVANCE_RIP_AND_FINISH();
2287 IEM_MC_END();
2288 }
2289 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
2290}
2291
2292
2293/**
2294 * @opcode 0x55
2295 */
2296FNIEMOP_DEF(iemOp_push_eBP)
2297{
2298 IEMOP_MNEMONIC(push_rBP, "push rBP");
2299 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
2300}
2301
2302
2303/**
2304 * @opcode 0x56
2305 */
2306FNIEMOP_DEF(iemOp_push_eSI)
2307{
2308 IEMOP_MNEMONIC(push_rSI, "push rSI");
2309 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
2310}
2311
2312
2313/**
2314 * @opcode 0x57
2315 */
2316FNIEMOP_DEF(iemOp_push_eDI)
2317{
2318 IEMOP_MNEMONIC(push_rDI, "push rDI");
2319 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
2320}
2321
2322
2323/**
2324 * Common 'pop register' helper.
2325 */
2326FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
2327{
2328 if (IEM_IS_64BIT_CODE(pVCpu))
2329 {
2330 iReg |= pVCpu->iem.s.uRexB;
2331 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2332 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2333 }
2334
2335 switch (pVCpu->iem.s.enmEffOpSize)
2336 {
2337 case IEMMODE_16BIT:
2338 IEM_MC_BEGIN(0, 1, 0, 0);
2339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2340 IEM_MC_LOCAL(uint16_t *, pu16Dst);
2341 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
2342 IEM_MC_POP_U16(pu16Dst);
2343 IEM_MC_ADVANCE_RIP_AND_FINISH();
2344 IEM_MC_END();
2345 break;
2346
2347 case IEMMODE_32BIT:
2348 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2350 IEM_MC_LOCAL(uint32_t *, pu32Dst);
2351 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
2352 IEM_MC_POP_U32(pu32Dst);
2353 IEM_MC_CLEAR_HIGH_GREG_U64(iReg); /** @todo testcase*/
2354 IEM_MC_ADVANCE_RIP_AND_FINISH();
2355 IEM_MC_END();
2356 break;
2357
2358 case IEMMODE_64BIT:
2359 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2361 IEM_MC_LOCAL(uint64_t *, pu64Dst);
2362 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
2363 IEM_MC_POP_U64(pu64Dst);
2364 IEM_MC_ADVANCE_RIP_AND_FINISH();
2365 IEM_MC_END();
2366 break;
2367
2368 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2369 }
2370}
2371
2372
2373/**
2374 * @opcode 0x58
2375 */
2376FNIEMOP_DEF(iemOp_pop_eAX)
2377{
2378 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
2379 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
2380}
2381
2382
2383/**
2384 * @opcode 0x59
2385 */
2386FNIEMOP_DEF(iemOp_pop_eCX)
2387{
2388 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
2389 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
2390}
2391
2392
2393/**
2394 * @opcode 0x5a
2395 */
2396FNIEMOP_DEF(iemOp_pop_eDX)
2397{
2398 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
2399 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
2400}
2401
2402
2403/**
2404 * @opcode 0x5b
2405 */
2406FNIEMOP_DEF(iemOp_pop_eBX)
2407{
2408 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
2409 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
2410}
2411
2412
2413/**
2414 * @opcode 0x5c
2415 */
2416FNIEMOP_DEF(iemOp_pop_eSP)
2417{
2418 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
2419 if (IEM_IS_64BIT_CODE(pVCpu))
2420 {
2421 if (pVCpu->iem.s.uRexB)
2422 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
2423 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2424 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2425 }
2426
2427 /** @todo add testcase for this instruction. */
2428 switch (pVCpu->iem.s.enmEffOpSize)
2429 {
2430 case IEMMODE_16BIT:
2431 IEM_MC_BEGIN(0, 1, 0, 0);
2432 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2433 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2434 IEM_MC_LOCAL(uint16_t, u16Dst);
2435 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
2436 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
2437 IEM_MC_ADVANCE_RIP_AND_FINISH();
2438 IEM_MC_END();
2439 break;
2440
2441 case IEMMODE_32BIT:
2442 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
2443 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2444 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2445 IEM_MC_LOCAL(uint32_t, u32Dst);
2446 IEM_MC_POP_U32(&u32Dst);
2447 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
2448 IEM_MC_ADVANCE_RIP_AND_FINISH();
2449 IEM_MC_END();
2450 break;
2451
2452 case IEMMODE_64BIT:
2453 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2454 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2455 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2456 IEM_MC_LOCAL(uint64_t, u64Dst);
2457 IEM_MC_POP_U64(&u64Dst);
2458 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
2459 IEM_MC_ADVANCE_RIP_AND_FINISH();
2460 IEM_MC_END();
2461 break;
2462
2463 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2464 }
2465}
2466
2467
2468/**
2469 * @opcode 0x5d
2470 */
2471FNIEMOP_DEF(iemOp_pop_eBP)
2472{
2473 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
2474 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
2475}
2476
2477
2478/**
2479 * @opcode 0x5e
2480 */
2481FNIEMOP_DEF(iemOp_pop_eSI)
2482{
2483 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
2484 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
2485}
2486
2487
2488/**
2489 * @opcode 0x5f
2490 */
2491FNIEMOP_DEF(iemOp_pop_eDI)
2492{
2493 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
2494 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
2495}
2496
2497
2498/**
2499 * @opcode 0x60
2500 */
2501FNIEMOP_DEF(iemOp_pusha)
2502{
2503 IEMOP_MNEMONIC(pusha, "pusha");
2504 IEMOP_HLP_MIN_186();
2505 IEMOP_HLP_NO_64BIT();
2506 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2507 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_16);
2508 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2509 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_32);
2510}
2511
2512
2513/**
2514 * @opcode 0x61
2515 */
2516FNIEMOP_DEF(iemOp_popa__mvex)
2517{
2518 if (!IEM_IS_64BIT_CODE(pVCpu))
2519 {
2520 IEMOP_MNEMONIC(popa, "popa");
2521 IEMOP_HLP_MIN_186();
2522 IEMOP_HLP_NO_64BIT();
2523 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2524 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_popa_16);
2525 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2526 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_popa_32);
2527 }
2528 IEMOP_MNEMONIC(mvex, "mvex");
2529 Log(("mvex prefix is not supported!\n"));
2530 IEMOP_RAISE_INVALID_OPCODE_RET();
2531}
2532
2533
2534/**
2535 * @opcode 0x62
2536 * @opmnemonic bound
2537 * @op1 Gv_RO
2538 * @op2 Ma
2539 * @opmincpu 80186
2540 * @ophints harmless x86_invalid_64
2541 * @optest op1=0 op2=0 ->
2542 * @optest op1=1 op2=0 -> value.xcpt=5
2543 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2544 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2545 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2546 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2547 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2548 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2549 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2550 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2551 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2552 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2553 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2554 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2555 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2556 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2557 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2558 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2559 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2560 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2561 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2562 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2563 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2564 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2565 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2566 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2567 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2568 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2569 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2570 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2571 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2572 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2573 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2574 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2575 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2576 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2577 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2578 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2579 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2580 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2581 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2582 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2583 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2584 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2585 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set.  In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X.  Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /** @todo testcase: check that there are two memory accesses involved. Check
             * whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at m16, upper bound at m16+2. */
                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at m32, upper bound at m32+4. */
                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         * does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX decoding: consume payload bytes 2 and 3, then give up (NYI). */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2673
2674
/** Opcode 0x63 - non-64-bit modes only (in 64-bit mode 0x63 is movsxd).
 * ARPL adjusts the RPL field of the destination selector; invalid in real
 * and V86 mode. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination. */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination - read-modify-write mapping of the word. */
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2723
2724
2725/**
2726 * @opcode 0x63
2727 *
2728 * @note This is a weird one. It works like a regular move instruction if
2729 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2730 * @todo This definitely needs a testcase to verify the odd cases. */
2731FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
2732{
2733 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */
2734
2735 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
2736 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2737
2738 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2739 {
2740 if (IEM_IS_MODRM_REG_MODE(bRm))
2741 {
2742 /*
2743 * Register to register.
2744 */
2745 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2747 IEM_MC_LOCAL(uint64_t, u64Value);
2748 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
2749 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2750 IEM_MC_ADVANCE_RIP_AND_FINISH();
2751 IEM_MC_END();
2752 }
2753 else
2754 {
2755 /*
2756 * We're loading a register from memory.
2757 */
2758 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
2759 IEM_MC_LOCAL(uint64_t, u64Value);
2760 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2761 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2763 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2764 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2765 IEM_MC_ADVANCE_RIP_AND_FINISH();
2766 IEM_MC_END();
2767 }
2768 }
2769 else
2770 AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
2771}
2772
2773
2774/**
2775 * @opcode 0x64
2776 * @opmnemonic segfs
2777 * @opmincpu 80386
2778 * @opgroup og_prefixes
2779 */
2780FNIEMOP_DEF(iemOp_seg_FS)
2781{
2782 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
2783 IEMOP_HLP_MIN_386();
2784
2785 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
2786 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
2787
2788 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2789 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2790}
2791
2792
2793/**
2794 * @opcode 0x65
2795 * @opmnemonic seggs
2796 * @opmincpu 80386
2797 * @opgroup og_prefixes
2798 */
2799FNIEMOP_DEF(iemOp_seg_GS)
2800{
2801 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
2802 IEMOP_HLP_MIN_386();
2803
2804 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
2805 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
2806
2807 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2808 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2809}
2810
2811
2812/**
2813 * @opcode 0x66
2814 * @opmnemonic opsize
2815 * @openc prefix
2816 * @opmincpu 80386
2817 * @ophints harmless
2818 * @opgroup og_prefixes
2819 */
2820FNIEMOP_DEF(iemOp_op_size)
2821{
2822 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
2823 IEMOP_HLP_MIN_386();
2824
2825 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
2826 iemRecalEffOpSize(pVCpu);
2827
2828 /* For the 4 entry opcode tables, the operand prefix doesn't not count
2829 when REPZ or REPNZ are present. */
2830 if (pVCpu->iem.s.idxPrefix == 0)
2831 pVCpu->iem.s.idxPrefix = 1;
2832
2833 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2834 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2835}
2836
2837
2838/**
2839 * @opcode 0x67
2840 * @opmnemonic addrsize
2841 * @openc prefix
2842 * @opmincpu 80386
2843 * @ophints harmless
2844 * @opgroup og_prefixes
2845 */
2846FNIEMOP_DEF(iemOp_addr_size)
2847{
2848 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
2849 IEMOP_HLP_MIN_386();
2850
2851 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
2852 switch (pVCpu->iem.s.enmDefAddrMode)
2853 {
2854 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2855 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
2856 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2857 default: AssertFailed();
2858 }
2859
2860 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2861 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2862}
2863
2864
2865/**
2866 * @opcode 0x68
2867 */
2868FNIEMOP_DEF(iemOp_push_Iz)
2869{
2870 IEMOP_MNEMONIC(push_Iz, "push Iz");
2871 IEMOP_HLP_MIN_186();
2872 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2873 switch (pVCpu->iem.s.enmEffOpSize)
2874 {
2875 case IEMMODE_16BIT:
2876 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
2877 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2879 IEM_MC_PUSH_U16(u16Imm);
2880 IEM_MC_ADVANCE_RIP_AND_FINISH();
2881 IEM_MC_END();
2882 break;
2883
2884 case IEMMODE_32BIT:
2885 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2886 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2888 IEM_MC_PUSH_U32(u32Imm);
2889 IEM_MC_ADVANCE_RIP_AND_FINISH();
2890 IEM_MC_END();
2891 break;
2892
2893 case IEMMODE_64BIT:
2894 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
2895 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2897 IEM_MC_PUSH_U64(u64Imm);
2898 IEM_MC_ADVANCE_RIP_AND_FINISH();
2899 IEM_MC_END();
2900 break;
2901
2902 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2903 }
2904}
2905
2906
2907/**
2908 * @opcode 0x69
2909 */
2910FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
2911{
2912 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
2913 IEMOP_HLP_MIN_186();
2914 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2915 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2916
2917 switch (pVCpu->iem.s.enmEffOpSize)
2918 {
2919 case IEMMODE_16BIT:
2920 {
2921 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
2922 if (IEM_IS_MODRM_REG_MODE(bRm))
2923 {
2924 /* register operand */
2925 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2926 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
2927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2928 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2929 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
2930 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2931 IEM_MC_LOCAL(uint16_t, u16Tmp);
2932
2933 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2934 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2935 IEM_MC_REF_EFLAGS(pEFlags);
2936 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2937 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2938
2939 IEM_MC_ADVANCE_RIP_AND_FINISH();
2940 IEM_MC_END();
2941 }
2942 else
2943 {
2944 /* memory operand */
2945 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
2946 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2947 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2948
2949 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2951
2952 IEM_MC_LOCAL(uint16_t, u16Tmp);
2953 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2954
2955 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
2956 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
2957 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2958 IEM_MC_REF_EFLAGS(pEFlags);
2959 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2960 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2961
2962 IEM_MC_ADVANCE_RIP_AND_FINISH();
2963 IEM_MC_END();
2964 }
2965 break;
2966 }
2967
2968 case IEMMODE_32BIT:
2969 {
2970 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
2971 if (IEM_IS_MODRM_REG_MODE(bRm))
2972 {
2973 /* register operand */
2974 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2975 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
2976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2977 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2978 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
2979 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2980 IEM_MC_LOCAL(uint32_t, u32Tmp);
2981
2982 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2983 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2984 IEM_MC_REF_EFLAGS(pEFlags);
2985 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
2986 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2987
2988 IEM_MC_ADVANCE_RIP_AND_FINISH();
2989 IEM_MC_END();
2990 }
2991 else
2992 {
2993 /* memory operand */
2994 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
2995 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2996 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2997
2998 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3000
3001 IEM_MC_LOCAL(uint32_t, u32Tmp);
3002 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3003
3004 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3005 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
3006 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3007 IEM_MC_REF_EFLAGS(pEFlags);
3008 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3009 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3010
3011 IEM_MC_ADVANCE_RIP_AND_FINISH();
3012 IEM_MC_END();
3013 }
3014 break;
3015 }
3016
3017 case IEMMODE_64BIT:
3018 {
3019 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3020 if (IEM_IS_MODRM_REG_MODE(bRm))
3021 {
3022 /* register operand */
3023 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3024 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
3025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3026 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3027 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
3028 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3029 IEM_MC_LOCAL(uint64_t, u64Tmp);
3030
3031 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3032 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3033 IEM_MC_REF_EFLAGS(pEFlags);
3034 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3035 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3036
3037 IEM_MC_ADVANCE_RIP_AND_FINISH();
3038 IEM_MC_END();
3039 }
3040 else
3041 {
3042 /* memory operand */
3043 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3044 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3045 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3046
3047 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
3048 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* parameter count for the threaded function for this block. */
3049
3050 IEM_MC_LOCAL(uint64_t, u64Tmp);
3051 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3052
3053 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3054 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int32_t)u32Imm, 1);
3055 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3056 IEM_MC_REF_EFLAGS(pEFlags);
3057 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3058 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3059
3060 IEM_MC_ADVANCE_RIP_AND_FINISH();
3061 IEM_MC_END();
3062 }
3063 break;
3064 }
3065
3066 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3067 }
3068}
3069
3070
/**
 * @opcode 0x6a
 *
 * push Ib - push a sign-extended byte immediate, operand size selects the
 * pushed width (16/32/64).  Not valid on 8086/8088 (186+).
 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* in 64-bit mode the default operand size is 64-bit */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U16(i8Imm); /* i8Imm sign-extends to the push width */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U32(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U64(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3107
3108
/**
 * @opcode 0x6b
 *
 * imul Gv,Ev,Ib - two/three operand signed multiply with a sign-extended
 * byte immediate.  SF, ZF, AF and PF are left undefined by hardware.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            /* Pick the EFLAGS-behavior specific worker for the target CPU. */
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend Ib to 16 bits */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                /* Multiply in a local, then store to the destination register. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);

                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = byte immediate follows the ModR/M bytes */

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend Ib to 32 bits */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1); /* sign-extend Ib to 64 bits */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);

                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the threaded parameter count. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1); /* sign-extension done here instead */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3272
3273
/**
 * @opcode 0x6c
 *
 * ins Yb,DX - input byte(s) from port DX to ES:[e/r]DI.  Defers to the
 * C implementation, selected by address size; both REPNZ and REPZ prefixes
 * are treated as plain REP here.
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                            iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                            iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                            iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                            iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                            iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                            iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3310
3311
/**
 * @opcode 0x6d
 *
 * ins Yv,DX - input word/dword(s) from port DX to ES:[e/r]DI.  Defers to a
 * C implementation selected by operand and address size; 64-bit operand
 * size is treated like 32-bit (shares the op32 helpers).  Both REPNZ and
 * REPZ prefixes are treated as plain REP.
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the op32 helpers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                                    iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                                    iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                                    iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the op32 helpers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                                    iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                                    iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                                    iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3392
3393
/**
 * @opcode 0x6e
 *
 * outs DX,Yb - output byte(s) from [i]EffSeg[/i]:[e/r]SI to port DX.
 * Defers to the C implementation, selected by address size; the effective
 * segment is passed since OUTS honours segment overrides.  Both REPNZ and
 * REPZ prefixes are treated as plain REP.
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3436
3437
/**
 * @opcode 0x6f
 *
 * outs DX,Yv - output word/dword(s) to port DX.  Defers to a C
 * implementation selected by operand and address size; 64-bit operand size
 * is treated like 32-bit (shares the op32 helpers).  The effective segment
 * is passed since OUTS honours segment overrides.  Both REPNZ and REPZ
 * prefixes are treated as plain REP.
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the op32 helpers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the op32 helpers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3524
3525
/**
 * @opcode 0x70
 *
 * jo Jb - relative short jump (sign-extended byte displacement) taken when
 * the overflow flag is set; otherwise just advances RIP.
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3544
3545
/**
 * @opcode 0x71
 *
 * jno Jb - relative short jump taken when the overflow flag is clear.
 * Note the inverted arms: OF set falls through, OF clear takes the jump.
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3564
/**
 * @opcode 0x72
 *
 * jc/jb/jnae Jb - relative short jump taken when the carry flag is set.
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3583
3584
/**
 * @opcode 0x73
 *
 * jnc/jnb/jae Jb - relative short jump taken when the carry flag is clear
 * (inverted arms: CF set falls through).
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3603
3604
/**
 * @opcode 0x74
 *
 * je/jz Jb - relative short jump taken when the zero flag is set.
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3623
3624
/**
 * @opcode 0x75
 *
 * jne/jnz Jb - relative short jump taken when the zero flag is clear
 * (inverted arms: ZF set falls through).
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3643
3644
/**
 * @opcode 0x76
 *
 * jbe/jna Jb - relative short jump taken when CF or ZF is set
 * (unsigned below-or-equal).
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3663
3664
/**
 * @opcode 0x77
 *
 * ja/jnbe Jb - relative short jump taken when both CF and ZF are clear
 * (unsigned above; inverted arms: CF|ZF set falls through).
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3683
3684
/**
 * @opcode 0x78
 *
 * js Jb - relative short jump taken when the sign flag is set.
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3703
3704
/**
 * @opcode 0x79
 *
 * jns Jb - relative short jump taken when the sign flag is clear
 * (inverted arms: SF set falls through).
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3723
3724
/**
 * @opcode 0x7a
 *
 * jp/jpe Jb - relative short jump taken when the parity flag is set.
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3743
3744
/**
 * @opcode 0x7b
 *
 * jnp/jpo Jb - relative short jump taken when the parity flag is clear
 * (inverted arms: PF set falls through).
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3763
3764
/**
 * @opcode 0x7c
 *
 * jl/jnge Jb - relative short jump taken when SF != OF (signed less).
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3783
3784
/**
 * @opcode 0x7d
 *
 * jnl/jge Jb - relative short jump taken when SF == OF (signed
 * greater-or-equal; inverted arms: SF != OF falls through).
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3803
3804
/**
 * @opcode 0x7e
 *
 * jle/jng Jb - relative short jump taken when ZF is set or SF != OF
 * (signed less-or-equal).
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3823
3824
/**
 * @opcode 0x7f
 *
 * jg/jnle Jb - relative short jump taken when ZF is clear and SF == OF
 * (signed greater; inverted arms: ZF set or SF != OF falls through).
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3843
3844
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Eb_Ib_80.
 *
 * NOTE(review): This macro deliberately ends with two scopes still open
 * (the 'else { (void)0' tail for the LOCK-prefixed memory path).  It must
 * always be followed in the same function by IEMOP_BODY_BINARY_Eb_Ib_LOCKED
 * (or, for the read-only variant, IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK), which
 * fills in and closes that path.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0

/**
 * Companion to IEMOP_BODY_BINARY_Eb_Ib_RW: emits the LOCK-prefixed memory
 * path (using the locked assembly worker) and closes the scopes left open
 * by the _RW macro above.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
3919
/**
 * Read-only variant of IEMOP_BODY_BINARY_Eb_Ib_RW for workers that only
 * read the destination (cmp): the memory operand is mapped read-only and
 * unmapped without a write-back.  Like the _RW macro it leaves two scopes
 * open for the LOCK path; close them with IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0

/**
 * Companion to IEMOP_BODY_BINARY_Eb_Ib_RO: the LOCK prefix is invalid for
 * read-only (cmp) forms, so raise \#UD and close the open scopes.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
3973
3974
3975
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /0
 *
 * add Eb,Ib - the _RW/_LOCKED macro pair emits both the plain and the
 * LOCK-prefixed paths (the two invocations form one statement together).
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_add_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
}
3986
3987
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /1
 *
 * or Eb,Ib - plain and LOCK-prefixed paths via the _RW/_LOCKED macro pair.
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_or_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
}
3998
3999
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /2
 *
 * adc Eb,Ib - plain and LOCK-prefixed paths via the _RW/_LOCKED macro pair.
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
}
4010
4011
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /3
 *
 * sbb Eb,Ib - plain and LOCK-prefixed paths via the _RW/_LOCKED macro pair.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
}
4022
4023
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /4
 *
 * and Eb,Ib - plain and LOCK-prefixed paths via the _RW/_LOCKED macro pair.
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_and_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
}
4034
4035
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /5
 *
 * sub Eb,Ib - plain and LOCK-prefixed paths via the _RW/_LOCKED macro pair.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
}
4046
4047
4048/**
4049 * @opmaps grp1_80,grp1_83
4050 * @opcode /6
4051 */
4052FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
4053{
4054 IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
4055 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_xor_u8);
4056 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
4057}
4058
4059
4060/**
4061 * @opmaps grp1_80,grp1_83
4062 * @opcode /7
4063 */
4064FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
4065{
4066 IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
4067 IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
4068 IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
4069}
4070
4071
4072/**
4073 * @opcode 0x80
4074 */
4075FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
4076{
4077 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4078 switch (IEM_GET_MODRM_REG_8(bRm))
4079 {
4080 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
4081 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib, bRm);
4082 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
4083 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
4084 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
4085 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
4086 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
4087 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
4088 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4089 }
4090}
4091
4092
4093/**
4094 * Body for a group 1 binary operator.
4095 */
4096#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4097 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4098 { \
4099 /* register target */ \
4100 switch (pVCpu->iem.s.enmEffOpSize) \
4101 { \
4102 case IEMMODE_16BIT: \
4103 { \
4104 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4105 IEM_MC_BEGIN(3, 0, 0, 0); \
4106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4107 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4108 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4109 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4110 \
4111 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4112 IEM_MC_REF_EFLAGS(pEFlags); \
4113 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4114 \
4115 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4116 IEM_MC_END(); \
4117 break; \
4118 } \
4119 \
4120 case IEMMODE_32BIT: \
4121 { \
4122 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4123 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4125 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4126 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4127 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4128 \
4129 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4130 IEM_MC_REF_EFLAGS(pEFlags); \
4131 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4132 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4133 \
4134 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4135 IEM_MC_END(); \
4136 break; \
4137 } \
4138 \
4139 case IEMMODE_64BIT: \
4140 { \
4141 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4142 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4144 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4145 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4146 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4147 \
4148 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4149 IEM_MC_REF_EFLAGS(pEFlags); \
4150 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4151 \
4152 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4153 IEM_MC_END(); \
4154 break; \
4155 } \
4156 \
4157 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4158 } \
4159 } \
4160 else \
4161 { \
4162 /* memory target */ \
4163 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4164 { \
4165 switch (pVCpu->iem.s.enmEffOpSize) \
4166 { \
4167 case IEMMODE_16BIT: \
4168 { \
4169 IEM_MC_BEGIN(3, 3, 0, 0); \
4170 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4171 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4172 \
4173 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4174 IEMOP_HLP_DONE_DECODING(); \
4175 \
4176 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4177 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4178 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4179 \
4180 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4181 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4182 IEM_MC_FETCH_EFLAGS(EFlags); \
4183 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4184 \
4185 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
4186 IEM_MC_COMMIT_EFLAGS(EFlags); \
4187 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4188 IEM_MC_END(); \
4189 break; \
4190 } \
4191 \
4192 case IEMMODE_32BIT: \
4193 { \
4194 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4195 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4196 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4197 \
4198 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4199 IEMOP_HLP_DONE_DECODING(); \
4200 \
4201 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4202 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4203 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4204 \
4205 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4206 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4207 IEM_MC_FETCH_EFLAGS(EFlags); \
4208 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4209 \
4210 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
4211 IEM_MC_COMMIT_EFLAGS(EFlags); \
4212 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4213 IEM_MC_END(); \
4214 break; \
4215 } \
4216 \
4217 case IEMMODE_64BIT: \
4218 { \
4219 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4220 \
4221 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4222 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4223 \
4224 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4225 IEMOP_HLP_DONE_DECODING(); \
4226 \
4227 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4228 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4229 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4230 \
4231 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4232 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4233 IEM_MC_FETCH_EFLAGS(EFlags); \
4234 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4235 \
4236 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
4237 IEM_MC_COMMIT_EFLAGS(EFlags); \
4238 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4239 IEM_MC_END(); \
4240 break; \
4241 } \
4242 \
4243 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4244 } \
4245 } \
4246 else \
4247 { \
4248 (void)0
/* This must be a separate macro due to parsing restrictions in IEMAllInstPython.py. */
/**
 * Continuation of IEMOP_BODY_BINARY_Ev_Iz_RW: LOCK-prefixed memory
 * destination variant.  Maps the destination read/write, calls the locked
 * worker for the effective operand size, and closes the braces left open by
 * the _RW macro.
 *
 * @param a_fnLockedU16  16-bit atomic worker, invoked as (pu16Dst, u16Src, pEFlags).
 * @param a_fnLockedU32  32-bit atomic worker.
 * @param a_fnLockedU64  64-bit atomic worker (immediate sign-extended from 32 bits).
 */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,           0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src,  u16Imm,   1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,           0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src,  u32Imm,   1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,           0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src,  u64Imm,   1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4333
/* read-only version */
/**
 * Self-contained Ev,Iz body for CMP: the destination is only read (mapped
 * read-only for memory operands, no high-dword clearing for 32-bit register
 * operands since the register is not written), only EFLAGS are updated, and
 * a LOCK prefix raises an invalid-lock-prefix exception.
 *
 * @param a_fnNormalU16  16-bit worker, invoked as (pu16Dst, u16Src, pEFlags).
 * @param a_fnNormalU32  32-bit worker.
 * @param a_fnNormalU64  64-bit worker (immediate sign-extended from 32 bits).
 */
#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                 0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,    1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                 0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,    1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                 0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,    1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst,          0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src,  u16Imm,   1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst,          0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src,  u32Imm,   1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst,          0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src,  u64Imm,   1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4490
4491
4492/**
4493 * @opmaps grp1_81
4494 * @opcode /0
4495 */
4496FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
4497{
4498 IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
4499 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
4500 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
4501}
4502
4503
4504/**
4505 * @opmaps grp1_81
4506 * @opcode /1
4507 */
4508FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
4509{
4510 IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
4511 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
4512 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
4513}
4514
4515
4516/**
4517 * @opmaps grp1_81
4518 * @opcode /2
4519 */
4520FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
4521{
4522 IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
4523 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
4524 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
4525}
4526
4527
4528/**
4529 * @opmaps grp1_81
4530 * @opcode /3
4531 */
4532FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
4533{
4534 IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
4535 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
4536 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
4537}
4538
4539
4540/**
4541 * @opmaps grp1_81
4542 * @opcode /4
4543 */
4544FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
4545{
4546 IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
4547 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
4548 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
4549}
4550
4551
4552/**
4553 * @opmaps grp1_81
4554 * @opcode /5
4555 */
4556FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
4557{
4558 IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
4559 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
4560 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
4561}
4562
4563
4564/**
4565 * @opmaps grp1_81
4566 * @opcode /6
4567 */
4568FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
4569{
4570 IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
4571 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
4572 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
4573}
4574
4575
4576/**
4577 * @opmaps grp1_81
4578 * @opcode /7
4579 */
4580FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
4581{
4582 IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
4583 IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
4584}
4585
4586
4587/**
4588 * @opcode 0x81
4589 */
4590FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
4591{
4592 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4593 switch (IEM_GET_MODRM_REG_8(bRm))
4594 {
4595 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
4596 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz, bRm);
4597 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
4598 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
4599 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
4600 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
4601 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
4602 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
4603 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4604 }
4605}
4606
4607
4608/**
4609 * @opcode 0x82
4610 * @opmnemonic grp1_82
4611 * @opgroup og_groups
4612 */
4613FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
4614{
4615 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
4616 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
4617}
4618
4619
4620/**
4621 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4622 * iemOp_Grp1_Ev_Ib.
4623 */
4624#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4625 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4626 { \
4627 /* \
4628 * Register target \
4629 */ \
4630 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4631 switch (pVCpu->iem.s.enmEffOpSize) \
4632 { \
4633 case IEMMODE_16BIT: \
4634 IEM_MC_BEGIN(3, 0, 0, 0); \
4635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4636 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4637 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
4638 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4639 \
4640 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4641 IEM_MC_REF_EFLAGS(pEFlags); \
4642 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4643 \
4644 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4645 IEM_MC_END(); \
4646 break; \
4647 \
4648 case IEMMODE_32BIT: \
4649 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4651 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4652 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
4653 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4654 \
4655 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4656 IEM_MC_REF_EFLAGS(pEFlags); \
4657 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4658 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4659 \
4660 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4661 IEM_MC_END(); \
4662 break; \
4663 \
4664 case IEMMODE_64BIT: \
4665 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4667 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4668 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
4669 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4670 \
4671 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4672 IEM_MC_REF_EFLAGS(pEFlags); \
4673 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4674 \
4675 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4676 IEM_MC_END(); \
4677 break; \
4678 \
4679 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4680 } \
4681 } \
4682 else \
4683 { \
4684 /* \
4685 * Memory target. \
4686 */ \
4687 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4688 { \
4689 switch (pVCpu->iem.s.enmEffOpSize) \
4690 { \
4691 case IEMMODE_16BIT: \
4692 IEM_MC_BEGIN(3, 3, 0, 0); \
4693 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4694 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4695 \
4696 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4697 IEMOP_HLP_DONE_DECODING(); \
4698 \
4699 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4700 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4701 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4702 \
4703 IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
4704 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4705 IEM_MC_FETCH_EFLAGS(EFlags); \
4706 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4707 \
4708 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
4709 IEM_MC_COMMIT_EFLAGS(EFlags); \
4710 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4711 IEM_MC_END(); \
4712 break; \
4713 \
4714 case IEMMODE_32BIT: \
4715 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4716 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4717 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4718 \
4719 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4720 IEMOP_HLP_DONE_DECODING(); \
4721 \
4722 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4723 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4724 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4725 \
4726 IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
4727 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4728 IEM_MC_FETCH_EFLAGS(EFlags); \
4729 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4730 \
4731 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
4732 IEM_MC_COMMIT_EFLAGS(EFlags); \
4733 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4734 IEM_MC_END(); \
4735 break; \
4736 \
4737 case IEMMODE_64BIT: \
4738 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4739 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4740 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4741 \
4742 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4743 IEMOP_HLP_DONE_DECODING(); \
4744 \
4745 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4746 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4747 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4748 \
4749 IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
4750 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4751 IEM_MC_FETCH_EFLAGS(EFlags); \
4752 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4753 \
4754 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
4755 IEM_MC_COMMIT_EFLAGS(EFlags); \
4756 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4757 IEM_MC_END(); \
4758 break; \
4759 \
4760 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4761 } \
4762 } \
4763 else \
4764 { \
4765 (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * Continuation of IEMOP_BODY_BINARY_Ev_Ib_RW: LOCK-prefixed memory
 * destination variant.  Sign-extends the byte immediate to the effective
 * operand size, calls the locked worker, and closes the braces left open
 * by the _RW macro.
 *
 * @param a_fnLockedU16  16-bit atomic worker, invoked as (pu16Dst, u16Src, pEFlags).
 * @param a_fnLockedU32  32-bit atomic worker.
 * @param a_fnLockedU64  64-bit atomic worker.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,                       0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src,  (int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,                 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,                       0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src,  (int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,                 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,                       0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src,  (int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,                 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4844
/* read-only variant */
/**
 * Self-contained Ev,Ib body for CMP: the byte immediate is sign-extended to
 * the effective operand size, the destination is only read (memory operands
 * mapped read-only, no high-dword clearing for the 32-bit register form),
 * and a LOCK prefix raises an invalid-lock-prefix exception.
 *
 * @param a_fnNormalU16  16-bit worker, invoked as (pu16Dst, u16Src, pEFlags).
 * @param a_fnNormalU32  32-bit worker.
 * @param a_fnNormalU64  64-bit worker.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst,                      0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src,  (int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,                 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst,                      0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src,  (int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,                 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst,                      0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src,  (int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,                 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4991
4992/**
4993 * @opmaps grp1_83
4994 * @opcode /0
4995 */
4996FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
4997{
4998 IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
4999 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
5000 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
5001}
5002
5003
5004/**
5005 * @opmaps grp1_83
5006 * @opcode /1
5007 */
5008FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
5009{
5010 IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
5011 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
5012 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
5013}
5014
5015
5016/**
5017 * @opmaps grp1_83
5018 * @opcode /2
5019 *
5020 * ADC Ev,Ib: add-with-carry of a sign-extended byte immediate to the r/m
5021 * operand (plain forms via _RW, LOCK-prefixed memory form via _LOCKED).
5022 */
5023FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
5021{
5022 IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
5023 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
5024 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
5025}
5026
5027
5028/**
5029 * @opmaps grp1_83
5030 * @opcode /3
5031 *
5032 * SBB Ev,Ib: subtract-with-borrow of a sign-extended byte immediate from
5033 * the r/m operand (plain forms via _RW, LOCK-prefixed form via _LOCKED).
5034 */
5035FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
5033{
5034 IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
5035 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
5036 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
5037}
5038
5039
5040/**
5041 * @opmaps grp1_83
5042 * @opcode /4
5043 *
5044 * AND Ev,Ib: ANDs a sign-extended byte immediate into the r/m operand
5045 * (plain forms via _RW, LOCK-prefixed memory form via _LOCKED).
5046 */
5047FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
5045{
5046 IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
5047 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
5048 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
5049}
5050
5051
5052/**
5053 * @opmaps grp1_83
5054 * @opcode /5
5055 *
5056 * SUB Ev,Ib: subtracts a sign-extended byte immediate from the r/m operand
5057 * (plain forms via _RW, LOCK-prefixed memory form via _LOCKED).
5058 */
5059FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
5057{
5058 IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
5059 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
5060 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
5061}
5062
5063
5064/**
5065 * @opmaps grp1_83
5066 * @opcode /6
5067 *
5068 * XOR Ev,Ib: XORs a sign-extended byte immediate into the r/m operand
5069 * (plain forms via _RW, LOCK-prefixed memory form via _LOCKED).
5070 */
5071FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
5069{
5070 IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
5071 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
5072 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
5073}
5074
5075
5076/**
5077 * @opmaps grp1_83
5078 * @opcode /7
5079 *
5080 * CMP Ev,Ib: compares the r/m operand with a sign-extended byte immediate.
5081 * Only a read-only body is needed - CMP does not write the destination,
5082 * so there is no LOCK-prefixed variant (LOCK is rejected by the RO body).
5083 */
5084FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
5081{
5082 IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
5083 IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
5084}
5085
5086
5087/**
5088 * @opcode 0x83
5089 *
5090 * Group 1 Ev,Ib: dispatches on the ModRM.reg field (3 bits, so all eight
5091 * cases are covered) to the ADD/OR/ADC/SBB/AND/SUB/XOR/CMP workers above.
5092 */
5093FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
5091{
5092 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
5093 to the 386 even if absent in the intel reference manuals and some
5094 3rd party opcode listings. */
5095 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5096 switch (IEM_GET_MODRM_REG_8(bRm))
5097 {
5098 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
5099 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib, bRm);
5100 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
5101 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
5102 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
5103 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
5104 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
5105 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
5106 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5107 }
5108}
5109
5110
5111/**
5112 * @opcode 0x84
5113 *
5114 * TEST Eb,Gb: ANDs the byte register into the r/m byte for flags only;
5115 * no operand is written, hence the RO body and explicit LOCK rejection.
5116 * AF is architecturally undefined after TEST.
5117 */
5118FNIEMOP_DEF(iemOp_test_Eb_Gb)
5115{
5116 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
5117 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5118 IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8);
5119 IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
5120}
5121
5122
5123/**
5124 * @opcode 0x85
5125 *
5126 * TEST Ev,Gv: word/dword/qword form of TEST; read-only, flags-only.
5127 * AF is architecturally undefined after TEST.
5128 */
5129FNIEMOP_DEF(iemOp_test_Ev_Gv)
5127{
5128 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
5129 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5130 IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64);
5131}
5132
5133
5134/**
5135 * @opcode 0x86
5136 *
5137 * XCHG Eb,Gb: swaps a byte register with a byte register or memory
5138 * location. The memory form is implicitly locked on real hardware, so the
5139 * locked worker is used unless the VM is configured to disregard LOCK.
5140 */
5141FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
5138{
5139 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5140 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
5141
5142 /*
5143 * If rm is denoting a register, no more instruction bytes.
5144 */
5145 if (IEM_IS_MODRM_REG_MODE(bRm))
5146 {
5147 IEM_MC_BEGIN(0, 2, 0, 0);
5148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5149 IEM_MC_LOCAL(uint8_t, uTmp1);
5150 IEM_MC_LOCAL(uint8_t, uTmp2);
5151
5152 /* Swap via two temporaries so reg==rm also works correctly. */
5153 IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5153 IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5154 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5155 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5156
5157 IEM_MC_ADVANCE_RIP_AND_FINISH();
5158 IEM_MC_END();
5159 }
5160 else
5161 {
5162 /*
5163 * We're accessing memory.
5164 */
5165 IEM_MC_BEGIN(2, 4, 0, 0);
5166 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5167 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
5168 IEM_MC_LOCAL(uint8_t, uTmpReg);
5169 IEM_MC_ARG(uint8_t *, pu8Mem, 0);
5170 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1);
5171
5172 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5174 IEM_MC_MEM_MAP_U8_RW(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5175 IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
5176 /* XCHG with memory is implicitly atomic; use the locked worker
5177 unless the lock-disregard execution flag is set. */
5178 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5177 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
5178 else
5179 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
5180 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Mem, bUnmapInfo);
5181 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);
5182
5183 IEM_MC_ADVANCE_RIP_AND_FINISH();
5184 IEM_MC_END();
5185 }
5186}
5187
5188
5189/**
5190 * @opcode 0x87
5191 *
5192 * XCHG Ev,Gv: swaps a word/dword/qword register with a register or memory
5193 * location, one MC block per effective operand size. The memory form uses
5194 * the implicitly-locked worker unless LOCK is being disregarded.
5195 */
5196FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
5193{
5194 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
5195 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5196
5197 /*
5198 * If rm is denoting a register, no more instruction bytes.
5199 */
5200 if (IEM_IS_MODRM_REG_MODE(bRm))
5201 {
5202 switch (pVCpu->iem.s.enmEffOpSize)
5203 {
5204 case IEMMODE_16BIT:
5205 IEM_MC_BEGIN(0, 2, 0, 0);
5206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5207 IEM_MC_LOCAL(uint16_t, uTmp1);
5208 IEM_MC_LOCAL(uint16_t, uTmp2);
5209
5210 IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5211 IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5212 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5213 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5214
5215 IEM_MC_ADVANCE_RIP_AND_FINISH();
5216 IEM_MC_END();
5217 break;
5218
5219 case IEMMODE_32BIT:
5220 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5222 IEM_MC_LOCAL(uint32_t, uTmp1);
5223 IEM_MC_LOCAL(uint32_t, uTmp2);
5224
5225 /* The U32 store clears the upper half of the 64-bit register. */
5226 IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5226 IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5227 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5228 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5229
5230 IEM_MC_ADVANCE_RIP_AND_FINISH();
5231 IEM_MC_END();
5232 break;
5233
5234 case IEMMODE_64BIT:
5235 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
5236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5237 IEM_MC_LOCAL(uint64_t, uTmp1);
5238 IEM_MC_LOCAL(uint64_t, uTmp2);
5239
5240 IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5241 IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5242 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5243 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5244
5245 IEM_MC_ADVANCE_RIP_AND_FINISH();
5246 IEM_MC_END();
5247 break;
5248
5249 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5250 }
5251 }
5252 else
5253 {
5254 /*
5255 * We're accessing memory.
5256 */
5257 switch (pVCpu->iem.s.enmEffOpSize)
5258 {
5259 case IEMMODE_16BIT:
5260 IEM_MC_BEGIN(2, 4, 0, 0);
5261 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5262 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
5263 IEM_MC_LOCAL(uint16_t, uTmpReg);
5264 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
5265 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1);
5266
5267 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5269 IEM_MC_MEM_MAP_U16_RW(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5270 IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
5271 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5272 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
5273 else
5274 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
5275 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Mem, bUnmapInfo);
5276 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);
5277
5278 IEM_MC_ADVANCE_RIP_AND_FINISH();
5279 IEM_MC_END();
5280 break;
5281
5282 case IEMMODE_32BIT:
5283 IEM_MC_BEGIN(2, 4, IEM_MC_F_MIN_386, 0);
5284 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5285 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
5286 IEM_MC_LOCAL(uint32_t, uTmpReg);
5287 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
5288 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1);
5289
5290 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5292 IEM_MC_MEM_MAP_U32_RW(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5293 IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
5294 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5295 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
5296 else
5297 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
5298 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Mem, bUnmapInfo);
5299 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);
5300
5301 IEM_MC_ADVANCE_RIP_AND_FINISH();
5302 IEM_MC_END();
5303 break;
5304
5305 case IEMMODE_64BIT:
5306 IEM_MC_BEGIN(2, 4, IEM_MC_F_64BIT, 0);
5307 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5308 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
5309 IEM_MC_LOCAL(uint64_t, uTmpReg);
5310 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
5311 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1);
5312
5313 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5314 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5315 IEM_MC_MEM_MAP_U64_RW(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5316 IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
5317 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5318 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
5319 else
5320 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
5321 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Mem, bUnmapInfo);
5322 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);
5323
5324 IEM_MC_ADVANCE_RIP_AND_FINISH();
5325 IEM_MC_END();
5326 break;
5327
5328 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5329 }
5330 }
5331}
5332
5333
5334/**
5335 * @opcode 0x88
5336 *
5337 * MOV Eb,Gb: stores a byte register into a byte register or memory.
5338 */
5339FNIEMOP_DEF(iemOp_mov_Eb_Gb)
5338{
5339 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
5340
5341 uint8_t bRm;
5342 IEM_OPCODE_GET_NEXT_U8(&bRm);
5343
5344 /*
5345 * If rm is denoting a register, no more instruction bytes.
5346 */
5347 if (IEM_IS_MODRM_REG_MODE(bRm))
5348 {
5349 IEM_MC_BEGIN(0, 1, 0, 0);
5350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5351 IEM_MC_LOCAL(uint8_t, u8Value);
5352 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5353 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
5354 IEM_MC_ADVANCE_RIP_AND_FINISH();
5355 IEM_MC_END();
5356 }
5357 else
5358 {
5359 /*
5360 * We're writing a register to memory.
5361 */
5362 IEM_MC_BEGIN(0, 2, 0, 0);
5363 IEM_MC_LOCAL(uint8_t, u8Value);
5364 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5365 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5367 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5368 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
5369 IEM_MC_ADVANCE_RIP_AND_FINISH();
5370 IEM_MC_END();
5371 }
5372}
5373
5374
5375/**
5376 * @opcode 0x89
5377 *
5378 * MOV Ev,Gv: stores a word/dword/qword register into a register or memory,
5379 * one MC block per effective operand size.
5380 */
5381FNIEMOP_DEF(iemOp_mov_Ev_Gv)
5379{
5380 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
5381
5382 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5383
5384 /*
5385 * If rm is denoting a register, no more instruction bytes.
5386 */
5387 if (IEM_IS_MODRM_REG_MODE(bRm))
5388 {
5389 switch (pVCpu->iem.s.enmEffOpSize)
5390 {
5391 case IEMMODE_16BIT:
5392 IEM_MC_BEGIN(0, 1, 0, 0);
5393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5394 IEM_MC_LOCAL(uint16_t, u16Value);
5395 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5396 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
5397 IEM_MC_ADVANCE_RIP_AND_FINISH();
5398 IEM_MC_END();
5399 break;
5400
5401 case IEMMODE_32BIT:
5402 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
5403 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5404 IEM_MC_LOCAL(uint32_t, u32Value);
5405 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5406 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
5407 IEM_MC_ADVANCE_RIP_AND_FINISH();
5408 IEM_MC_END();
5409 break;
5410
5411 case IEMMODE_64BIT:
5412 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5414 IEM_MC_LOCAL(uint64_t, u64Value);
5415 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5416 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
5417 IEM_MC_ADVANCE_RIP_AND_FINISH();
5418 IEM_MC_END();
5419 break;
5420
5421 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5422 }
5423 }
5424 else
5425 {
5426 /*
5427 * We're writing a register to memory.
5428 */
5429 switch (pVCpu->iem.s.enmEffOpSize)
5430 {
5431 case IEMMODE_16BIT:
5432 IEM_MC_BEGIN(0, 2, 0, 0);
5433 IEM_MC_LOCAL(uint16_t, u16Value);
5434 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5435 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5437 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5438 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
5439 IEM_MC_ADVANCE_RIP_AND_FINISH();
5440 IEM_MC_END();
5441 break;
5442
5443 case IEMMODE_32BIT:
5444 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5445 IEM_MC_LOCAL(uint32_t, u32Value);
5446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5447 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5449 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5450 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
5451 IEM_MC_ADVANCE_RIP_AND_FINISH();
5452 IEM_MC_END();
5453 break;
5454
5455 case IEMMODE_64BIT:
5456 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
5457 IEM_MC_LOCAL(uint64_t, u64Value);
5458 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5459 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5461 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5462 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
5463 IEM_MC_ADVANCE_RIP_AND_FINISH();
5464 IEM_MC_END();
5465 break;
5466
5467 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5468 }
5469 }
5470}
5471
5472
5473/**
5474 * @opcode 0x8a
5475 *
5476 * MOV Gb,Eb: loads a byte register from a byte register or memory.
5477 */
5478FNIEMOP_DEF(iemOp_mov_Gb_Eb)
5477{
5478 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
5479
5480 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5481
5482 /*
5483 * If rm is denoting a register, no more instruction bytes.
5484 */
5485 if (IEM_IS_MODRM_REG_MODE(bRm))
5486 {
5487 IEM_MC_BEGIN(0, 1, 0, 0);
5488 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5489 IEM_MC_LOCAL(uint8_t, u8Value);
5490 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5491 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
5492 IEM_MC_ADVANCE_RIP_AND_FINISH();
5493 IEM_MC_END();
5494 }
5495 else
5496 {
5497 /*
5498 * We're loading a register from memory.
5499 */
5500 IEM_MC_BEGIN(0, 2, 0, 0);
5501 IEM_MC_LOCAL(uint8_t, u8Value);
5502 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5503 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5505 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5506 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
5507 IEM_MC_ADVANCE_RIP_AND_FINISH();
5508 IEM_MC_END();
5509 }
5510}
5511
5512
5513/**
5514 * @opcode 0x8b
5515 *
5516 * MOV Gv,Ev: loads a word/dword/qword register from a register or memory,
5517 * one MC block per effective operand size.
5518 */
5519FNIEMOP_DEF(iemOp_mov_Gv_Ev)
5517{
5518 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
5519
5520 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5521
5522 /*
5523 * If rm is denoting a register, no more instruction bytes.
5524 */
5525 if (IEM_IS_MODRM_REG_MODE(bRm))
5526 {
5527 switch (pVCpu->iem.s.enmEffOpSize)
5528 {
5529 case IEMMODE_16BIT:
5530 IEM_MC_BEGIN(0, 1, 0, 0);
5531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5532 IEM_MC_LOCAL(uint16_t, u16Value);
5533 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5534 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
5535 IEM_MC_ADVANCE_RIP_AND_FINISH();
5536 IEM_MC_END();
5537 break;
5538
5539 case IEMMODE_32BIT:
5540 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
5541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5542 IEM_MC_LOCAL(uint32_t, u32Value);
5543 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5544 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
5545 IEM_MC_ADVANCE_RIP_AND_FINISH();
5546 IEM_MC_END();
5547 break;
5548
5549 case IEMMODE_64BIT:
5550 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5552 IEM_MC_LOCAL(uint64_t, u64Value);
5553 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5554 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
5555 IEM_MC_ADVANCE_RIP_AND_FINISH();
5556 IEM_MC_END();
5557 break;
5558
5559 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5560 }
5561 }
5562 else
5563 {
5564 /*
5565 * We're loading a register from memory.
5566 */
5567 switch (pVCpu->iem.s.enmEffOpSize)
5568 {
5569 case IEMMODE_16BIT:
5570 IEM_MC_BEGIN(0, 2, 0, 0);
5571 IEM_MC_LOCAL(uint16_t, u16Value);
5572 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5575 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5576 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
5577 IEM_MC_ADVANCE_RIP_AND_FINISH();
5578 IEM_MC_END();
5579 break;
5580
5581 case IEMMODE_32BIT:
5582 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5583 IEM_MC_LOCAL(uint32_t, u32Value);
5584 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5585 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5587 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5588 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
5589 IEM_MC_ADVANCE_RIP_AND_FINISH();
5590 IEM_MC_END();
5591 break;
5592
5593 case IEMMODE_64BIT:
5594 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
5595 IEM_MC_LOCAL(uint64_t, u64Value);
5596 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5597 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5599 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5600 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
5601 IEM_MC_ADVANCE_RIP_AND_FINISH();
5602 IEM_MC_END();
5603 break;
5604
5605 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5606 }
5607 }
5608}
5609
5610
5611/**
5612 * opcode 0x63
5613 * @todo Table fixme
5614 */
5615FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
5616{
5617 if (!IEM_IS_64BIT_CODE(pVCpu))
5618 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
5619 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
5620 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
5621 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
5622}
5623
5624
5625/**
5626 * @opcode 0x8c
5627 *
5628 * MOV Ev,Sw: stores a segment register. Register destinations honour the
5629 * operand size (upper bits zeroed); memory destinations are always a
5630 * 16-bit store regardless of operand-size prefixes.
5631 */
5632FNIEMOP_DEF(iemOp_mov_Ev_Sw)
5629{
5630 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
5631
5632 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5633
5634 /*
5635 * Check that the destination register exists. The REX.R prefix is ignored.
5636 */
5637 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5638 if (iSegReg > X86_SREG_GS)
5639 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5640
5641 /*
5642 * If rm is denoting a register, no more instruction bytes.
5643 * In that case, the operand size is respected and the upper bits are
5644 * cleared (starting with some pentium).
5645 */
5646 if (IEM_IS_MODRM_REG_MODE(bRm))
5647 {
5648 switch (pVCpu->iem.s.enmEffOpSize)
5649 {
5650 case IEMMODE_16BIT:
5651 IEM_MC_BEGIN(0, 1, 0, 0);
5652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5653 IEM_MC_LOCAL(uint16_t, u16Value);
5654 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5655 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
5656 IEM_MC_ADVANCE_RIP_AND_FINISH();
5657 IEM_MC_END();
5658 break;
5659
5660 case IEMMODE_32BIT:
5661 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
5662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5663 IEM_MC_LOCAL(uint32_t, u32Value);
5664 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
5665 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
5666 IEM_MC_ADVANCE_RIP_AND_FINISH();
5667 IEM_MC_END();
5668 break;
5669
5670 case IEMMODE_64BIT:
5671 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5673 IEM_MC_LOCAL(uint64_t, u64Value);
5674 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
5675 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
5676 IEM_MC_ADVANCE_RIP_AND_FINISH();
5677 IEM_MC_END();
5678 break;
5679
5680 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5681 }
5682 }
5683 else
5684 {
5685 /*
5686 * We're saving the register to memory. The access is word sized
5687 * regardless of operand size prefixes.
5688 */
5689#if 0 /* not necessary */
5690 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5691#endif
5692 IEM_MC_BEGIN(0, 2, 0, 0);
5693 IEM_MC_LOCAL(uint16_t, u16Value);
5694 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5695 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5697 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5698 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
5699 IEM_MC_ADVANCE_RIP_AND_FINISH();
5700 IEM_MC_END();
5701 }
5702}
5703
5704
5705
5706
5707/**
5708 * @opcode 0x8d
5709 *
5710 * LEA Gv,M: stores the effective address of the memory operand in the
5711 * destination register; the register-direct ModRM form is invalid. The
5712 * address is truncated to the effective operand size for 16/32-bit.
5713 */
5714FNIEMOP_DEF(iemOp_lea_Gv_M)
5711{
5712 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
5713 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5714 if (IEM_IS_MODRM_REG_MODE(bRm))
5715 IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */
5716
5717 switch (pVCpu->iem.s.enmEffOpSize)
5718 {
5719 case IEMMODE_16BIT:
5720 IEM_MC_BEGIN(0, 2, 0, 0);
5721 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5722 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5724 IEM_MC_LOCAL(uint16_t, u16Cast);
5725 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
5726 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
5727 IEM_MC_ADVANCE_RIP_AND_FINISH();
5728 IEM_MC_END();
5729 break;
5730
5731 case IEMMODE_32BIT:
5732 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5733 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5734 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5736 IEM_MC_LOCAL(uint32_t, u32Cast);
5737 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
5738 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
5739 IEM_MC_ADVANCE_RIP_AND_FINISH();
5740 IEM_MC_END();
5741 break;
5742
5743 case IEMMODE_64BIT:
5744 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5745 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5746 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5748 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
5749 IEM_MC_ADVANCE_RIP_AND_FINISH();
5750 IEM_MC_END();
5751 break;
5752
5753 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5754 }
5755}
5756
5757
5758/**
5759 * @opcode 0x8e
5760 *
5761 * MOV Sw,Ev: loads a segment register from a 16-bit register or memory
5762 * operand via iemCImpl_load_SReg. CS is not a valid destination. The
5763 * IEM_CIMPL_F_XXX flags vary per target register/mode because loading SS
5764 * inhibits interrupts and loading CS/SS/DS/ES can change the exec mode -
5765 * a restriction imposed by the recompiler.
5766 */
5767FNIEMOP_DEF(iemOp_mov_Sw_Ev)
5762{
5763 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
5764
5765 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5766
5767 /*
5768 * The practical operand size is 16-bit.
5769 */
5770#if 0 /* not necessary */
5771 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5772#endif
5773
5774 /*
5775 * Check that the destination register exists and can be used with this
5776 * instruction. The REX.R prefix is ignored.
5777 */
5778 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5779 /** @todo r=bird: What does 8086 do here wrt CS? */
5780 if ( iSegReg == X86_SREG_CS
5781 || iSegReg > X86_SREG_GS)
5782 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5783
5784 /*
5785 * If rm is denoting a register, no more instruction bytes.
5786 *
5787 * Note! Using IEMOP_MOV_SW_EV_REG_BODY here to specify different
5788 * IEM_CIMPL_F_XXX values depending on the CPU mode and target
5789 * register. This is a restriction of the current recompiler
5790 * approach.
5791 */
5792 if (IEM_IS_MODRM_REG_MODE(bRm))
5793 {
5794#define IEMOP_MOV_SW_EV_REG_BODY(a_fCImplFlags) \
5795 IEM_MC_BEGIN(2, 0, 0, a_fCImplFlags); \
5796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5797 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
5798 IEM_MC_ARG(uint16_t, u16Value, 1); \
5799 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5800 IEM_MC_CALL_CIMPL_2(a_fCImplFlags, iemCImpl_load_SReg, iSRegArg, u16Value); \
5801 IEM_MC_END()
5802
5803 if (iSegReg == X86_SREG_SS)
5804 {
5805 if (IEM_IS_32BIT_CODE(pVCpu))
5806 {
5807 IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
5808 }
5809 else
5810 {
5811 IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
5812 }
5813 }
5814 else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
5815 {
5816 IEMOP_MOV_SW_EV_REG_BODY(0);
5817 }
5818 else
5819 {
5820 IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_MODE);
5821 }
5822#undef IEMOP_MOV_SW_EV_REG_BODY
5823 }
5824 else
5825 {
5826 /*
5827 * We're loading the register from memory. The access is word sized
5828 * regardless of operand size prefixes.
5829 */
5830#define IEMOP_MOV_SW_EV_MEM_BODY(a_fCImplFlags) \
5831 IEM_MC_BEGIN(2, 1, 0, a_fCImplFlags); \
5832 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
5833 IEM_MC_ARG(uint16_t, u16Value, 1); \
5834 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5835 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5837 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5838 IEM_MC_CALL_CIMPL_2(a_fCImplFlags, iemCImpl_load_SReg, iSRegArg, u16Value); \
5839 IEM_MC_END()
5840
5841 if (iSegReg == X86_SREG_SS)
5842 {
5843 if (IEM_IS_32BIT_CODE(pVCpu))
5844 {
5845 IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
5846 }
5847 else
5848 {
5849 IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
5850 }
5851 }
5852 else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
5853 {
5854 IEMOP_MOV_SW_EV_MEM_BODY(0);
5855 }
5856 else
5857 {
5858 IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_MODE);
5859 }
5860#undef IEMOP_MOV_SW_EV_MEM_BODY
5861 }
5862}
5863
5864
/** Opcode 0x8f /0.
 * POP Ev: pops the top of stack into a register or memory operand. The
 * memory form delegates to iemCImpl_pop_mem16/32/64 because Intel defines
 * RSP as already incremented for the effective address calculation. */
5866FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
5867{
5868 /* This bugger is rather annoying as it requires rSP to be updated before
5869 doing the effective address calculations. Will eventually require a
5870 split between the R/M+SIB decoding and the effective address
5871 calculation - which is something that is required for any attempt at
5872 reusing this code for a recompiler. It may also be good to have if we
5873 need to delay #UD exception caused by invalid lock prefixes.
5874
5875 For now, we'll do a mostly safe interpreter-only implementation here. */
5876 /** @todo What's the deal with the 'reg' field and pop Ev? Ignoring it for
5877 * now until tests show it's checked.. */
5878 IEMOP_MNEMONIC(pop_Ev, "pop Ev");
5879
5880 /* Register access is relatively easy and can share code. */
5881 if (IEM_IS_MODRM_REG_MODE(bRm))
5882 return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
5883
5884 /*
5885 * Memory target.
5886 *
5887 * Intel says that RSP is incremented before it's used in any effective
5888 * address calculations. This means some serious extra annoyance here since
5889 * we decode and calculate the effective address in one step and like to
5890 * delay committing registers till everything is done.
5891 *
5892 * So, we'll decode and calculate the effective address twice. This will
5893 * require some recoding if turned into a recompiler.
5894 */
5895 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */
5896
5897#if 1 /* This can be compiled, optimize later if needed. */
5898 switch (pVCpu->iem.s.enmEffOpSize)
5899 {
5900 case IEMMODE_16BIT:
5901 IEM_MC_BEGIN(2, 0, 0, 0);
5902 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
5903 /* The high byte of the third argument tells the EA calculation
5904 how much to add to xSP (the operand size). */
5905 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
5904 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5905 IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
5906 IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
5907 IEM_MC_END();
5908 break;
5909
5910 case IEMMODE_32BIT:
5911 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
5912 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
5913 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
5914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5915 IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
5916 IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
5917 IEM_MC_END();
5918 break;
5919
5920 case IEMMODE_64BIT:
5921 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
5922 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
5923 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
5924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5925 IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
5926 IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
5927 IEM_MC_END();
5928 break;
5929
5930 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5931 }
5932
5933#else
5934# ifndef TST_IEM_CHECK_MC
5935 /* Calc effective address with modified ESP. */
5936/** @todo testcase */
5937 RTGCPTR GCPtrEff;
5938 VBOXSTRICTRC rcStrict;
5939 switch (pVCpu->iem.s.enmEffOpSize)
5940 {
5941 case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
5942 case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
5943 case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
5944 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5945 }
5946 if (rcStrict != VINF_SUCCESS)
5947 return rcStrict;
5948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5949
5950 /* Perform the operation - this should be CImpl. */
5951 RTUINT64U TmpRsp;
5952 TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
5953 switch (pVCpu->iem.s.enmEffOpSize)
5954 {
5955 case IEMMODE_16BIT:
5956 {
5957 uint16_t u16Value;
5958 rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
5959 if (rcStrict == VINF_SUCCESS)
5960 rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
5961 break;
5962 }
5963
5964 case IEMMODE_32BIT:
5965 {
5966 uint32_t u32Value;
5967 rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
5968 if (rcStrict == VINF_SUCCESS)
5969 rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
5970 break;
5971 }
5972
5973 case IEMMODE_64BIT:
5974 {
5975 uint64_t u64Value;
5976 rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
5977 if (rcStrict == VINF_SUCCESS)
5978 rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
5979 break;
5980 }
5981
5982 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5983 }
5984 if (rcStrict == VINF_SUCCESS)
5985 {
5986 pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
5987 return iemRegUpdateRipAndFinishClearingRF(pVCpu);
5988 }
5989 return rcStrict;
5990
5991# else
5992 return VERR_IEM_IPE_2;
5993# endif
5994#endif
5995}
5996
5997
5998/**
5999 * @opcode 0x8f
6000 *
6001 * ModRM.reg == 0 is POP Ev; /1 thru /7 form AMD's XOP prefix (decoded
6002 * below when the CPU profile has XOP, otherwise #UD).
6003 */
6004FNIEMOP_DEF(iemOp_Grp1A__xop)
6002{
6003 /*
6004 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
6005 * three byte VEX prefix, except that the mmmmm field cannot have the values
6006 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
6007 */
6008 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6009 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
6010 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
6011
6012 IEMOP_MNEMONIC(xop, "xop");
6013 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
6014 {
6015 /** @todo Test when exactly the XOP conformance checks kick in during
6016 * instruction decoding and fetching (using \#PF). */
6017 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
6018 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6019 /* Like VEX, XOP must not be mixed with legacy size/rep/lock/REX prefixes. */
6020 if ( ( pVCpu->iem.s.fPrefixes
6020 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
6021 == 0)
6022 {
6023 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
6024 if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
6025 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
6026 /* XOP payload bits are inverted like VEX: R/X/B in byte 1, vvvv/L/pp in byte 2. */
6027 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
6027 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
6028 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
6029 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
6030 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
6031 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
6032
6033 /** @todo XOP: Just use new tables and decoders. */
6034 switch (bRm & 0x1f)
6035 {
6036 case 8: /* xop opcode map 8. */
6037 IEMOP_BITCH_ABOUT_STUB();
6038 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6039
6040 case 9: /* xop opcode map 9. */
6041 IEMOP_BITCH_ABOUT_STUB();
6042 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6043
6044 case 10: /* xop opcode map 10. */
6045 IEMOP_BITCH_ABOUT_STUB();
6046 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6047
6048 default:
6049 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
6050 IEMOP_RAISE_INVALID_OPCODE_RET();
6051 }
6052 }
6053 else
6054 Log(("XOP: Invalid prefix mix!\n"));
6055 }
6056 else
6057 Log(("XOP: XOP support disabled!\n"));
6058 IEMOP_RAISE_INVALID_OPCODE_RET();
6059}
6060
6061
6062/**
6063 * Common 'xchg reg,rAX' helper.
6064 */
6065FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
6066{
6067 iReg |= pVCpu->iem.s.uRexB;
6068 switch (pVCpu->iem.s.enmEffOpSize)
6069 {
6070 case IEMMODE_16BIT:
6071 IEM_MC_BEGIN(0, 2, 0, 0);
6072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6073 IEM_MC_LOCAL(uint16_t, u16Tmp1);
6074 IEM_MC_LOCAL(uint16_t, u16Tmp2);
6075 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
6076 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
6077 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
6078 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
6079 IEM_MC_ADVANCE_RIP_AND_FINISH();
6080 IEM_MC_END();
6081 break;
6082
6083 case IEMMODE_32BIT:
6084 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
6085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6086 IEM_MC_LOCAL(uint32_t, u32Tmp1);
6087 IEM_MC_LOCAL(uint32_t, u32Tmp2);
6088 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
6089 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
6090 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
6091 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
6092 IEM_MC_ADVANCE_RIP_AND_FINISH();
6093 IEM_MC_END();
6094 break;
6095
6096 case IEMMODE_64BIT:
6097 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6099 IEM_MC_LOCAL(uint64_t, u64Tmp1);
6100 IEM_MC_LOCAL(uint64_t, u64Tmp2);
6101 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
6102 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
6103 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
6104 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
6105 IEM_MC_ADVANCE_RIP_AND_FINISH();
6106 IEM_MC_END();
6107 break;
6108
6109 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6110 }
6111}
6112
6113
6114/**
6115 * @opcode 0x90
6116 */
6117FNIEMOP_DEF(iemOp_nop)
6118{
6119 /* R8/R8D and RAX/EAX can be exchanged. */
6120 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
6121 {
6122 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
6123 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
6124 }
6125
6126 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
6127 {
6128 IEMOP_MNEMONIC(pause, "pause");
6129 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
6130 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
6131 if (!IEM_IS_IN_GUEST(pVCpu))
6132 { /* probable */ }
6133#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6134 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
6135 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmx_pause);
6136#endif
6137#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
6138 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
6139 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_svm_pause);
6140#endif
6141 }
6142 else
6143 IEMOP_MNEMONIC(nop, "nop");
6144 /** @todo testcase: lock nop; lock pause */
6145 IEM_MC_BEGIN(0, 0, 0, 0);
6146 IEMOP_HLP_DONE_DECODING();
6147 IEM_MC_ADVANCE_RIP_AND_FINISH();
6148 IEM_MC_END();
6149}
6150
6151
6152/**
6153 * @opcode 0x91
6154 */
6155FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
6156{
6157 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
6158 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
6159}
6160
6161
6162/**
6163 * @opcode 0x92
6164 */
6165FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
6166{
6167 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
6168 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
6169}
6170
6171
6172/**
6173 * @opcode 0x93
6174 */
6175FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
6176{
6177 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
6178 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
6179}
6180
6181
6182/**
6183 * @opcode 0x94
6184 */
6185FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6186{
6187 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6188 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6189}
6190
6191
6192/**
6193 * @opcode 0x95
6194 */
6195FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
6196{
6197 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
6198 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
6199}
6200
6201
6202/**
6203 * @opcode 0x96
6204 */
6205FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
6206{
6207 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
6208 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
6209}
6210
6211
6212/**
6213 * @opcode 0x97
6214 */
6215FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
6216{
6217 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
6218 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
6219}
6220
6221
6222/**
6223 * @opcode 0x98
6224 */
6225FNIEMOP_DEF(iemOp_cbw)
6226{
6227 switch (pVCpu->iem.s.enmEffOpSize)
6228 {
6229 case IEMMODE_16BIT:
6230 IEMOP_MNEMONIC(cbw, "cbw");
6231 IEM_MC_BEGIN(0, 1, 0, 0);
6232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6233 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
6234 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
6235 } IEM_MC_ELSE() {
6236 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
6237 } IEM_MC_ENDIF();
6238 IEM_MC_ADVANCE_RIP_AND_FINISH();
6239 IEM_MC_END();
6240 break;
6241
6242 case IEMMODE_32BIT:
6243 IEMOP_MNEMONIC(cwde, "cwde");
6244 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6246 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6247 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
6248 } IEM_MC_ELSE() {
6249 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
6250 } IEM_MC_ENDIF();
6251 IEM_MC_ADVANCE_RIP_AND_FINISH();
6252 IEM_MC_END();
6253 break;
6254
6255 case IEMMODE_64BIT:
6256 IEMOP_MNEMONIC(cdqe, "cdqe");
6257 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6259 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6260 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
6261 } IEM_MC_ELSE() {
6262 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
6263 } IEM_MC_ENDIF();
6264 IEM_MC_ADVANCE_RIP_AND_FINISH();
6265 IEM_MC_END();
6266 break;
6267
6268 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6269 }
6270}
6271
6272
6273/**
6274 * @opcode 0x99
6275 */
6276FNIEMOP_DEF(iemOp_cwd)
6277{
6278 switch (pVCpu->iem.s.enmEffOpSize)
6279 {
6280 case IEMMODE_16BIT:
6281 IEMOP_MNEMONIC(cwd, "cwd");
6282 IEM_MC_BEGIN(0, 1, 0, 0);
6283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6284 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6285 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
6286 } IEM_MC_ELSE() {
6287 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
6288 } IEM_MC_ENDIF();
6289 IEM_MC_ADVANCE_RIP_AND_FINISH();
6290 IEM_MC_END();
6291 break;
6292
6293 case IEMMODE_32BIT:
6294 IEMOP_MNEMONIC(cdq, "cdq");
6295 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6297 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6298 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
6299 } IEM_MC_ELSE() {
6300 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
6301 } IEM_MC_ENDIF();
6302 IEM_MC_ADVANCE_RIP_AND_FINISH();
6303 IEM_MC_END();
6304 break;
6305
6306 case IEMMODE_64BIT:
6307 IEMOP_MNEMONIC(cqo, "cqo");
6308 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6310 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
6311 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
6312 } IEM_MC_ELSE() {
6313 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
6314 } IEM_MC_ENDIF();
6315 IEM_MC_ADVANCE_RIP_AND_FINISH();
6316 IEM_MC_END();
6317 break;
6318
6319 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6320 }
6321}
6322
6323
6324/**
6325 * @opcode 0x9a
6326 */
6327FNIEMOP_DEF(iemOp_call_Ap)
6328{
6329 IEMOP_MNEMONIC(call_Ap, "call Ap");
6330 IEMOP_HLP_NO_64BIT();
6331
6332 /* Decode the far pointer address and pass it on to the far call C implementation. */
6333 uint32_t off32Seg;
6334 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
6335 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
6336 else
6337 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
6338 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
6339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6340 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
6341 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
6342 iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
6343}
6344
6345
6346 /** Opcode 0x9b. (aka fwait)
 *
 * May raise device-not-available or a pending FPU exception (per the two
 * IEM_MC_MAYBE_RAISE_* checks); otherwise completes as a no-op. */
6347 FNIEMOP_DEF(iemOp_wait)
6348 {
6349     IEMOP_MNEMONIC(wait, "wait");
6350     IEM_MC_BEGIN(0, 0, 0, 0);
6351     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6352     IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
6353     IEM_MC_MAYBE_RAISE_FPU_XCPT();
6354     IEM_MC_ADVANCE_RIP_AND_FINISH();
6355     IEM_MC_END();
6356 }
6357
6358
6359/**
6360 * @opcode 0x9c
6361 */
6362FNIEMOP_DEF(iemOp_pushf_Fv)
6363{
6364 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
6365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6366 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6367 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
6368}
6369
6370
6371/**
6372 * @opcode 0x9d
6373 */
6374FNIEMOP_DEF(iemOp_popf_Fv)
6375{
6376 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
6377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6378 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6379 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
6380 iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
6381}
6382
6383
6384/**
6385 * @opcode 0x9e
6386 */
6387FNIEMOP_DEF(iemOp_sahf)
6388{
6389 IEMOP_MNEMONIC(sahf, "sahf");
6390 if ( IEM_IS_64BIT_CODE(pVCpu)
6391 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6392 IEMOP_RAISE_INVALID_OPCODE_RET();
6393 IEM_MC_BEGIN(0, 2, 0, 0);
6394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6395 IEM_MC_LOCAL(uint32_t, u32Flags);
6396 IEM_MC_LOCAL(uint32_t, EFlags);
6397 IEM_MC_FETCH_EFLAGS(EFlags);
6398 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
6399 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6400 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
6401 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
6402 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
6403 IEM_MC_COMMIT_EFLAGS(EFlags);
6404 IEM_MC_ADVANCE_RIP_AND_FINISH();
6405 IEM_MC_END();
6406}
6407
6408
6409/**
6410 * @opcode 0x9f
6411 */
6412FNIEMOP_DEF(iemOp_lahf)
6413{
6414 IEMOP_MNEMONIC(lahf, "lahf");
6415 if ( IEM_IS_64BIT_CODE(pVCpu)
6416 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6417 IEMOP_RAISE_INVALID_OPCODE_RET();
6418 IEM_MC_BEGIN(0, 1, 0, 0);
6419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6420 IEM_MC_LOCAL(uint8_t, u8Flags);
6421 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
6422 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
6423 IEM_MC_ADVANCE_RIP_AND_FINISH();
6424 IEM_MC_END();
6425}
6426
6427
6428/**
6429 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
6430 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
6431 * Will return/throw on failures.
6432 * @param a_GCPtrMemOff The variable to store the offset in.
6433 */
6434#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
6435 do \
6436 { \
6437 switch (pVCpu->iem.s.enmEffAddrMode) \
6438 { \
6439 case IEMMODE_16BIT: \
6440 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
6441 break; \
6442 case IEMMODE_32BIT: \
6443 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
6444 break; \
6445 case IEMMODE_64BIT: \
6446 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
6447 break; \
6448 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
6449 } \
6450 } while (0)
6451
6452/**
6453 * @opcode 0xa0
6454 */
6455FNIEMOP_DEF(iemOp_mov_AL_Ob)
6456{
6457 /*
6458 * Get the offset.
6459 */
6460 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
6461 RTGCPTR GCPtrMemOff;
6462 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6463
6464 /*
6465 * Fetch AL.
6466 */
6467 IEM_MC_BEGIN(0, 1, 0, 0);
6468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6469 IEM_MC_LOCAL(uint8_t, u8Tmp);
6470 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6471 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
6472 IEM_MC_ADVANCE_RIP_AND_FINISH();
6473 IEM_MC_END();
6474}
6475
6476
6477/**
6478 * @opcode 0xa1
6479 */
6480FNIEMOP_DEF(iemOp_mov_rAX_Ov)
6481{
6482 /*
6483 * Get the offset.
6484 */
6485 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
6486 RTGCPTR GCPtrMemOff;
6487 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6488
6489 /*
6490 * Fetch rAX.
6491 */
6492 switch (pVCpu->iem.s.enmEffOpSize)
6493 {
6494 case IEMMODE_16BIT:
6495 IEM_MC_BEGIN(0, 1, 0, 0);
6496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6497 IEM_MC_LOCAL(uint16_t, u16Tmp);
6498 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6499 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
6500 IEM_MC_ADVANCE_RIP_AND_FINISH();
6501 IEM_MC_END();
6502 break;
6503
6504 case IEMMODE_32BIT:
6505 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6507 IEM_MC_LOCAL(uint32_t, u32Tmp);
6508 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6509 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
6510 IEM_MC_ADVANCE_RIP_AND_FINISH();
6511 IEM_MC_END();
6512 break;
6513
6514 case IEMMODE_64BIT:
6515 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6517 IEM_MC_LOCAL(uint64_t, u64Tmp);
6518 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6519 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
6520 IEM_MC_ADVANCE_RIP_AND_FINISH();
6521 IEM_MC_END();
6522 break;
6523
6524 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6525 }
6526}
6527
6528
6529/**
6530 * @opcode 0xa2
6531 */
6532FNIEMOP_DEF(iemOp_mov_Ob_AL)
6533{
6534 /*
6535 * Get the offset.
6536 */
6537 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
6538 RTGCPTR GCPtrMemOff;
6539 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6540
6541 /*
6542 * Store AL.
6543 */
6544 IEM_MC_BEGIN(0, 1, 0, 0);
6545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6546 IEM_MC_LOCAL(uint8_t, u8Tmp);
6547 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
6548 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
6549 IEM_MC_ADVANCE_RIP_AND_FINISH();
6550 IEM_MC_END();
6551}
6552
6553
6554/**
6555 * @opcode 0xa3
6556 */
6557FNIEMOP_DEF(iemOp_mov_Ov_rAX)
6558{
6559 /*
6560 * Get the offset.
6561 */
6562 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
6563 RTGCPTR GCPtrMemOff;
6564 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6565
6566 /*
6567 * Store rAX.
6568 */
6569 switch (pVCpu->iem.s.enmEffOpSize)
6570 {
6571 case IEMMODE_16BIT:
6572 IEM_MC_BEGIN(0, 1, 0, 0);
6573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6574 IEM_MC_LOCAL(uint16_t, u16Tmp);
6575 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
6576 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
6577 IEM_MC_ADVANCE_RIP_AND_FINISH();
6578 IEM_MC_END();
6579 break;
6580
6581 case IEMMODE_32BIT:
6582 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6584 IEM_MC_LOCAL(uint32_t, u32Tmp);
6585 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
6586 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
6587 IEM_MC_ADVANCE_RIP_AND_FINISH();
6588 IEM_MC_END();
6589 break;
6590
6591 case IEMMODE_64BIT:
6592 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6594 IEM_MC_LOCAL(uint64_t, u64Tmp);
6595 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
6596 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
6597 IEM_MC_ADVANCE_RIP_AND_FINISH();
6598 IEM_MC_END();
6599 break;
6600
6601 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6602 }
6603}
6604
6605 /** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one non-rep movs element: load from iEffSeg:xSI, store to ES:xDI,
 * then step both index registers forward or (if EFLAGS.DF is set) backward
 * by the element size. */
6606 #define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
6607     IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
6608     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
6609     IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
6610     IEM_MC_LOCAL(RTGCPTR, uAddr); \
6611     IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
6612     IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
6613     IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
6614     IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
6615     IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
6616         IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
6617         IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
6618     } IEM_MC_ELSE() { \
6619         IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
6620         IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
6621     } IEM_MC_ENDIF(); \
6622     IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6623     IEM_MC_END() \
6624 
6624
6625/**
6626 * @opcode 0xa4
6627 */
6628FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
6629{
6630 /*
6631 * Use the C implementation if a repeat prefix is encountered.
6632 */
6633 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6634 {
6635 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
6636 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6637 switch (pVCpu->iem.s.enmEffAddrMode)
6638 {
6639 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
6640 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
6641 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
6642 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6643 }
6644 }
6645
6646 /*
6647 * Sharing case implementation with movs[wdq] below.
6648 */
6649 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
6650 switch (pVCpu->iem.s.enmEffAddrMode)
6651 {
6652 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
6653 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
6654 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
6655 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6656 }
6657}
6658
6659
6660/**
6661 * @opcode 0xa5
6662 */
6663FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
6664{
6665
6666 /*
6667 * Use the C implementation if a repeat prefix is encountered.
6668 */
6669 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6670 {
6671 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
6672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6673 switch (pVCpu->iem.s.enmEffOpSize)
6674 {
6675 case IEMMODE_16BIT:
6676 switch (pVCpu->iem.s.enmEffAddrMode)
6677 {
6678 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
6679 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
6680 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
6681 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6682 }
6683 break;
6684 case IEMMODE_32BIT:
6685 switch (pVCpu->iem.s.enmEffAddrMode)
6686 {
6687 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
6688 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
6689 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
6690 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6691 }
6692 case IEMMODE_64BIT:
6693 switch (pVCpu->iem.s.enmEffAddrMode)
6694 {
6695 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
6696 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
6697 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
6698 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6699 }
6700 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6701 }
6702 }
6703
6704 /*
6705 * Annoying double switch here.
6706 * Using ugly macro for implementing the cases, sharing it with movsb.
6707 */
6708 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
6709 switch (pVCpu->iem.s.enmEffOpSize)
6710 {
6711 case IEMMODE_16BIT:
6712 switch (pVCpu->iem.s.enmEffAddrMode)
6713 {
6714 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
6715 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
6716 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
6717 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6718 }
6719 break;
6720
6721 case IEMMODE_32BIT:
6722 switch (pVCpu->iem.s.enmEffAddrMode)
6723 {
6724 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
6725 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
6726 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
6727 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6728 }
6729 break;
6730
6731 case IEMMODE_64BIT:
6732 switch (pVCpu->iem.s.enmEffAddrMode)
6733 {
6734 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6735 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
6736 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
6737 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6738 }
6739 break;
6740 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6741 }
6742}
6743
6744#undef IEM_MOVS_CASE
6745
6746 /** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one non-rep cmps element: loads the first operand from iEffSeg:xSI
 * and the second from ES:xDI, runs the cmp worker to set EFLAGS, then steps
 * both index registers forward or (if EFLAGS.DF is set) backward by the
 * element size. */
6747 #define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
6748     IEM_MC_BEGIN(3, 3, a_fMcFlags, 0); \
6749     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
6750     IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
6751     IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
6752     IEM_MC_ARG(uint32_t *, pEFlags, 2); \
6753     IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
6754     IEM_MC_LOCAL(RTGCPTR, uAddr); \
6755     \
6756     IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
6757     IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
6758     IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
6759     IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
6760     IEM_MC_REF_LOCAL(puValue1, uValue1); \
6761     IEM_MC_REF_EFLAGS(pEFlags); \
6762     IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
6763     \
6764     IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
6765         IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
6766         IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
6767     } IEM_MC_ELSE() { \
6768         IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
6769         IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
6770     } IEM_MC_ENDIF(); \
6771     IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6772     IEM_MC_END() \
6773 
6773
6774/**
6775 * @opcode 0xa6
6776 */
6777FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
6778{
6779
6780 /*
6781 * Use the C implementation if a repeat prefix is encountered.
6782 */
6783 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6784 {
6785 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
6786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6787 switch (pVCpu->iem.s.enmEffAddrMode)
6788 {
6789 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6790 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6791 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6792 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6793 }
6794 }
6795 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6796 {
6797 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
6798 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6799 switch (pVCpu->iem.s.enmEffAddrMode)
6800 {
6801 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6802 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6803 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6804 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6805 }
6806 }
6807
6808 /*
6809 * Sharing case implementation with cmps[wdq] below.
6810 */
6811 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
6812 switch (pVCpu->iem.s.enmEffAddrMode)
6813 {
6814 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
6815 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
6816 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
6817 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6818 }
6819}
6820
6821
6822/**
6823 * @opcode 0xa7
6824 */
6825FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
6826{
6827 /*
6828 * Use the C implementation if a repeat prefix is encountered.
6829 */
6830 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6831 {
6832 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
6833 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6834 switch (pVCpu->iem.s.enmEffOpSize)
6835 {
6836 case IEMMODE_16BIT:
6837 switch (pVCpu->iem.s.enmEffAddrMode)
6838 {
6839 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6840 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6841 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6842 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6843 }
6844 break;
6845 case IEMMODE_32BIT:
6846 switch (pVCpu->iem.s.enmEffAddrMode)
6847 {
6848 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6849 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6850 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6851 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6852 }
6853 case IEMMODE_64BIT:
6854 switch (pVCpu->iem.s.enmEffAddrMode)
6855 {
6856 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
6857 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6858 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6859 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6860 }
6861 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6862 }
6863 }
6864
6865 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6866 {
6867 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
6868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6869 switch (pVCpu->iem.s.enmEffOpSize)
6870 {
6871 case IEMMODE_16BIT:
6872 switch (pVCpu->iem.s.enmEffAddrMode)
6873 {
6874 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6875 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6876 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6877 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6878 }
6879 break;
6880 case IEMMODE_32BIT:
6881 switch (pVCpu->iem.s.enmEffAddrMode)
6882 {
6883 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6884 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6885 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6886 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6887 }
6888 case IEMMODE_64BIT:
6889 switch (pVCpu->iem.s.enmEffAddrMode)
6890 {
6891 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
6892 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6893 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6894 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6895 }
6896 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6897 }
6898 }
6899
6900 /*
6901 * Annoying double switch here.
6902 * Using ugly macro for implementing the cases, sharing it with cmpsb.
6903 */
6904 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
6905 switch (pVCpu->iem.s.enmEffOpSize)
6906 {
6907 case IEMMODE_16BIT:
6908 switch (pVCpu->iem.s.enmEffAddrMode)
6909 {
6910 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
6911 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
6912 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
6913 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6914 }
6915 break;
6916
6917 case IEMMODE_32BIT:
6918 switch (pVCpu->iem.s.enmEffAddrMode)
6919 {
6920 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
6921 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
6922 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
6923 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6924 }
6925 break;
6926
6927 case IEMMODE_64BIT:
6928 switch (pVCpu->iem.s.enmEffAddrMode)
6929 {
6930 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6931 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break;
6932 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
6933 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6934 }
6935 break;
6936 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6937 }
6938}
6939
6940#undef IEM_CMPS_CASE
6941
6942/**
6943 * @opcode 0xa8
6944 */
6945FNIEMOP_DEF(iemOp_test_AL_Ib)
6946{
6947 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
6948 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6949 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
6950}
6951
6952
6953/**
6954 * @opcode 0xa9
6955 */
6956FNIEMOP_DEF(iemOp_test_eAX_Iz)
6957{
6958 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
6959 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6960 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
6961}
6962
6963
6964 /** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits one non-rep stos element: stores the low ValBits of rAX to ES:xDI,
 * then steps xDI forward or (if EFLAGS.DF is set) backward by the element
 * size. */
6965 #define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
6966     IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
6967     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
6968     IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
6969     IEM_MC_LOCAL(RTGCPTR, uAddr); \
6970     IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
6971     IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
6972     IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
6973     IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
6974         IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
6975     } IEM_MC_ELSE() { \
6976         IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
6977     } IEM_MC_ENDIF(); \
6978     IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6979     IEM_MC_END() \
6980 
6980
6981/**
6982 * @opcode 0xaa
6983 */
6984FNIEMOP_DEF(iemOp_stosb_Yb_AL)
6985{
6986 /*
6987 * Use the C implementation if a repeat prefix is encountered.
6988 */
6989 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6990 {
6991 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
6992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6993 switch (pVCpu->iem.s.enmEffAddrMode)
6994 {
6995 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m16);
6996 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m32);
6997 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m64);
6998 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6999 }
7000 }
7001
7002 /*
7003 * Sharing case implementation with stos[wdq] below.
7004 */
7005 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
7006 switch (pVCpu->iem.s.enmEffAddrMode)
7007 {
7008 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7009 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7010 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
7011 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7012 }
7013}
7014
7015
7016/**
7017 * @opcode 0xab
7018 */
7019FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
7020{
7021 /*
7022 * Use the C implementation if a repeat prefix is encountered.
7023 */
7024 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7025 {
7026 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
7027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7028 switch (pVCpu->iem.s.enmEffOpSize)
7029 {
7030 case IEMMODE_16BIT:
7031 switch (pVCpu->iem.s.enmEffAddrMode)
7032 {
7033 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m16);
7034 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m32);
7035 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m64);
7036 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7037 }
7038 break;
7039 case IEMMODE_32BIT:
7040 switch (pVCpu->iem.s.enmEffAddrMode)
7041 {
7042 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m16);
7043 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m32);
7044 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m64);
7045 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7046 }
7047 case IEMMODE_64BIT:
7048 switch (pVCpu->iem.s.enmEffAddrMode)
7049 {
7050 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7051 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m32);
7052 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m64);
7053 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7054 }
7055 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7056 }
7057 }
7058
7059 /*
7060 * Annoying double switch here.
7061 * Using ugly macro for implementing the cases, sharing it with stosb.
7062 */
7063 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7064 switch (pVCpu->iem.s.enmEffOpSize)
7065 {
7066 case IEMMODE_16BIT:
7067 switch (pVCpu->iem.s.enmEffAddrMode)
7068 {
7069 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7070 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7071 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
7072 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7073 }
7074 break;
7075
7076 case IEMMODE_32BIT:
7077 switch (pVCpu->iem.s.enmEffAddrMode)
7078 {
7079 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7080 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7081 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
7082 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7083 }
7084 break;
7085
7086 case IEMMODE_64BIT:
7087 switch (pVCpu->iem.s.enmEffAddrMode)
7088 {
7089 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7090 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
7091 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
7092 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7093 }
7094 break;
7095 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7096 }
7097}
7098
7099#undef IEM_STOS_CASE
7100
/**
 * Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv to emit the
 * microcode body of a single (non-repeating) LODS variant.
 *
 * Loads ValBits from iEffSeg:xSI into the low ValBits of xAX, then decrements
 * or increments xSI by ValBits/8 depending on EFLAGS.DF.
 *
 * @param   ValBits     Operand width in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address width in bits (16, 32 or 64); the
 *                      xSI fetch is zero-extended to 64 bits.
 * @param   a_fMcFlags  IEM_MC_F_XXX flags for IEM_MC_BEGIN.
 */
#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7117
7118/**
7119 * @opcode 0xac
7120 */
7121FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
7122{
7123 /*
7124 * Use the C implementation if a repeat prefix is encountered.
7125 */
7126 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7127 {
7128 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
7129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7130 switch (pVCpu->iem.s.enmEffAddrMode)
7131 {
7132 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
7133 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
7134 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
7135 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7136 }
7137 }
7138
7139 /*
7140 * Sharing case implementation with stos[wdq] below.
7141 */
7142 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
7143 switch (pVCpu->iem.s.enmEffAddrMode)
7144 {
7145 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7146 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7147 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
7148 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7149 }
7150}
7151
7152
7153/**
7154 * @opcode 0xad
7155 */
7156FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
7157{
7158 /*
7159 * Use the C implementation if a repeat prefix is encountered.
7160 */
7161 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7162 {
7163 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
7164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7165 switch (pVCpu->iem.s.enmEffOpSize)
7166 {
7167 case IEMMODE_16BIT:
7168 switch (pVCpu->iem.s.enmEffAddrMode)
7169 {
7170 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
7171 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
7172 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
7173 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7174 }
7175 break;
7176 case IEMMODE_32BIT:
7177 switch (pVCpu->iem.s.enmEffAddrMode)
7178 {
7179 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
7180 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
7181 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
7182 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7183 }
7184 case IEMMODE_64BIT:
7185 switch (pVCpu->iem.s.enmEffAddrMode)
7186 {
7187 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
7188 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
7189 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
7190 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7191 }
7192 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7193 }
7194 }
7195
7196 /*
7197 * Annoying double switch here.
7198 * Using ugly macro for implementing the cases, sharing it with lodsb.
7199 */
7200 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
7201 switch (pVCpu->iem.s.enmEffOpSize)
7202 {
7203 case IEMMODE_16BIT:
7204 switch (pVCpu->iem.s.enmEffAddrMode)
7205 {
7206 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7207 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7208 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
7209 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7210 }
7211 break;
7212
7213 case IEMMODE_32BIT:
7214 switch (pVCpu->iem.s.enmEffAddrMode)
7215 {
7216 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7217 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7218 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
7219 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7220 }
7221 break;
7222
7223 case IEMMODE_64BIT:
7224 switch (pVCpu->iem.s.enmEffAddrMode)
7225 {
7226 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7227 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
7228 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
7229 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7230 }
7231 break;
7232 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7233 }
7234}
7235
7236#undef IEM_LODS_CASE
7237
/**
 * Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv to emit the
 * microcode body of a single (non-repeating) SCAS variant.
 *
 * Compares the low ValBits of xAX against ES:xDI (via the 'cmp' arithmetic
 * helper, i.e. flags only), then decrements or increments xDI by ValBits/8
 * depending on EFLAGS.DF.
 *
 * @param   ValBits     Operand width in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address width in bits (16, 32 or 64); the
 *                      xDI fetch is zero-extended to 64 bits.
 * @param   a_fMcFlags  IEM_MC_F_XXX flags for IEM_MC_BEGIN.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
7260
7261/**
7262 * @opcode 0xae
7263 */
7264FNIEMOP_DEF(iemOp_scasb_AL_Xb)
7265{
7266 /*
7267 * Use the C implementation if a repeat prefix is encountered.
7268 */
7269 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7270 {
7271 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
7272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7273 switch (pVCpu->iem.s.enmEffAddrMode)
7274 {
7275 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m16);
7276 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m32);
7277 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m64);
7278 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7279 }
7280 }
7281 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7282 {
7283 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
7284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7285 switch (pVCpu->iem.s.enmEffAddrMode)
7286 {
7287 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m16);
7288 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m32);
7289 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m64);
7290 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7291 }
7292 }
7293
7294 /*
7295 * Sharing case implementation with stos[wdq] below.
7296 */
7297 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
7298 switch (pVCpu->iem.s.enmEffAddrMode)
7299 {
7300 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7301 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7302 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
7303 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7304 }
7305}
7306
7307
7308/**
7309 * @opcode 0xaf
7310 */
7311FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
7312{
7313 /*
7314 * Use the C implementation if a repeat prefix is encountered.
7315 */
7316 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7317 {
7318 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
7319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7320 switch (pVCpu->iem.s.enmEffOpSize)
7321 {
7322 case IEMMODE_16BIT:
7323 switch (pVCpu->iem.s.enmEffAddrMode)
7324 {
7325 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m16);
7326 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m32);
7327 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m64);
7328 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7329 }
7330 break;
7331 case IEMMODE_32BIT:
7332 switch (pVCpu->iem.s.enmEffAddrMode)
7333 {
7334 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m16);
7335 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m32);
7336 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m64);
7337 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7338 }
7339 case IEMMODE_64BIT:
7340 switch (pVCpu->iem.s.enmEffAddrMode)
7341 {
7342 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
7343 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m32);
7344 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m64);
7345 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7346 }
7347 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7348 }
7349 }
7350 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7351 {
7352 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
7353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7354 switch (pVCpu->iem.s.enmEffOpSize)
7355 {
7356 case IEMMODE_16BIT:
7357 switch (pVCpu->iem.s.enmEffAddrMode)
7358 {
7359 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m16);
7360 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m32);
7361 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m64);
7362 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7363 }
7364 break;
7365 case IEMMODE_32BIT:
7366 switch (pVCpu->iem.s.enmEffAddrMode)
7367 {
7368 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m16);
7369 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m32);
7370 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m64);
7371 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7372 }
7373 case IEMMODE_64BIT:
7374 switch (pVCpu->iem.s.enmEffAddrMode)
7375 {
7376 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
7377 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m32);
7378 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m64);
7379 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7380 }
7381 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7382 }
7383 }
7384
7385 /*
7386 * Annoying double switch here.
7387 * Using ugly macro for implementing the cases, sharing it with scasb.
7388 */
7389 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
7390 switch (pVCpu->iem.s.enmEffOpSize)
7391 {
7392 case IEMMODE_16BIT:
7393 switch (pVCpu->iem.s.enmEffAddrMode)
7394 {
7395 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7396 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7397 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
7398 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7399 }
7400 break;
7401
7402 case IEMMODE_32BIT:
7403 switch (pVCpu->iem.s.enmEffAddrMode)
7404 {
7405 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7406 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7407 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
7408 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7409 }
7410 break;
7411
7412 case IEMMODE_64BIT:
7413 switch (pVCpu->iem.s.enmEffAddrMode)
7414 {
7415 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7416 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
7417 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
7418 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7419 }
7420 break;
7421 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7422 }
7423}
7424
7425#undef IEM_SCAS_CASE
7426
7427/**
7428 * Common 'mov r8, imm8' helper.
7429 */
7430FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
7431{
7432 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7433 IEM_MC_BEGIN(0, 0, 0, 0);
7434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7435 IEM_MC_STORE_GREG_U8_CONST(iFixedReg, u8Imm);
7436 IEM_MC_ADVANCE_RIP_AND_FINISH();
7437 IEM_MC_END();
7438}
7439
7440
7441/**
7442 * @opcode 0xb0
7443 */
7444FNIEMOP_DEF(iemOp_mov_AL_Ib)
7445{
7446 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
7447 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7448}
7449
7450
7451/**
7452 * @opcode 0xb1
7453 */
7454FNIEMOP_DEF(iemOp_CL_Ib)
7455{
7456 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
7457 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7458}
7459
7460
7461/**
7462 * @opcode 0xb2
7463 */
7464FNIEMOP_DEF(iemOp_DL_Ib)
7465{
7466 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
7467 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7468}
7469
7470
7471/**
7472 * @opcode 0xb3
7473 */
7474FNIEMOP_DEF(iemOp_BL_Ib)
7475{
7476 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
7477 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7478}
7479
7480
7481/**
7482 * @opcode 0xb4
7483 */
7484FNIEMOP_DEF(iemOp_mov_AH_Ib)
7485{
7486 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
7487 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7488}
7489
7490
7491/**
7492 * @opcode 0xb5
7493 */
7494FNIEMOP_DEF(iemOp_CH_Ib)
7495{
7496 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
7497 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7498}
7499
7500
7501/**
7502 * @opcode 0xb6
7503 */
7504FNIEMOP_DEF(iemOp_DH_Ib)
7505{
7506 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
7507 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7508}
7509
7510
7511/**
7512 * @opcode 0xb7
7513 */
7514FNIEMOP_DEF(iemOp_BH_Ib)
7515{
7516 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
7517 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7518}
7519
7520
7521/**
7522 * Common 'mov regX,immX' helper.
7523 */
7524FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
7525{
7526 switch (pVCpu->iem.s.enmEffOpSize)
7527 {
7528 case IEMMODE_16BIT:
7529 IEM_MC_BEGIN(0, 0, 0, 0);
7530 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7532 IEM_MC_STORE_GREG_U16_CONST(iFixedReg, u16Imm);
7533 IEM_MC_ADVANCE_RIP_AND_FINISH();
7534 IEM_MC_END();
7535 break;
7536
7537 case IEMMODE_32BIT:
7538 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7539 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
7540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7541 IEM_MC_STORE_GREG_U32_CONST(iFixedReg, u32Imm);
7542 IEM_MC_ADVANCE_RIP_AND_FINISH();
7543 IEM_MC_END();
7544 break;
7545
7546 case IEMMODE_64BIT:
7547 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
7548 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
7549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7550 IEM_MC_STORE_GREG_U64_CONST(iFixedReg, u64Imm);
7551 IEM_MC_ADVANCE_RIP_AND_FINISH();
7552 IEM_MC_END();
7553 break;
7554 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7555 }
7556}
7557
7558
7559/**
7560 * @opcode 0xb8
7561 */
7562FNIEMOP_DEF(iemOp_eAX_Iv)
7563{
7564 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
7565 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7566}
7567
7568
7569/**
7570 * @opcode 0xb9
7571 */
7572FNIEMOP_DEF(iemOp_eCX_Iv)
7573{
7574 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
7575 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7576}
7577
7578
7579/**
7580 * @opcode 0xba
7581 */
7582FNIEMOP_DEF(iemOp_eDX_Iv)
7583{
7584 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
7585 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7586}
7587
7588
7589/**
7590 * @opcode 0xbb
7591 */
7592FNIEMOP_DEF(iemOp_eBX_Iv)
7593{
7594 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
7595 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7596}
7597
7598
7599/**
7600 * @opcode 0xbc
7601 */
7602FNIEMOP_DEF(iemOp_eSP_Iv)
7603{
7604 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
7605 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7606}
7607
7608
7609/**
7610 * @opcode 0xbd
7611 */
7612FNIEMOP_DEF(iemOp_eBP_Iv)
7613{
7614 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
7615 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7616}
7617
7618
7619/**
7620 * @opcode 0xbe
7621 */
7622FNIEMOP_DEF(iemOp_eSI_Iv)
7623{
7624 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
7625 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7626}
7627
7628
7629/**
7630 * @opcode 0xbf
7631 */
7632FNIEMOP_DEF(iemOp_eDI_Iv)
7633{
7634 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
7635 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7636}
7637
7638
7639/**
7640 * @opcode 0xc0
7641 */
7642FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
7643{
7644 IEMOP_HLP_MIN_186();
7645 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7646 PCIEMOPSHIFTSIZES pImpl;
7647 switch (IEM_GET_MODRM_REG_8(bRm))
7648 {
7649 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
7650 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
7651 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
7652 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
7653 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
7654 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
7655 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
7656 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
7657 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
7658 }
7659 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7660
7661 if (IEM_IS_MODRM_REG_MODE(bRm))
7662 {
7663 /* register */
7664 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7665 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0);
7666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7667 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7668 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7669 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7670 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7671 IEM_MC_REF_EFLAGS(pEFlags);
7672 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7673 IEM_MC_ADVANCE_RIP_AND_FINISH();
7674 IEM_MC_END();
7675 }
7676 else
7677 {
7678 /* memory */
7679 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_186, 0);
7680 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7681 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7682
7683 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7685
7686 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
7687 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7688 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7689
7690 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7691 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7692 IEM_MC_FETCH_EFLAGS(EFlags);
7693 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7694
7695 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
7696 IEM_MC_COMMIT_EFLAGS(EFlags);
7697 IEM_MC_ADVANCE_RIP_AND_FINISH();
7698 IEM_MC_END();
7699 }
7700}
7701
7702
7703/**
7704 * @opcode 0xc1
7705 */
7706FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
7707{
7708 IEMOP_HLP_MIN_186();
7709 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7710 PCIEMOPSHIFTSIZES pImpl;
7711 switch (IEM_GET_MODRM_REG_8(bRm))
7712 {
7713 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
7714 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
7715 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
7716 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
7717 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
7718 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
7719 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
7720 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
7721 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
7722 }
7723 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7724
7725 if (IEM_IS_MODRM_REG_MODE(bRm))
7726 {
7727 /* register */
7728 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7729 switch (pVCpu->iem.s.enmEffOpSize)
7730 {
7731 case IEMMODE_16BIT:
7732 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0);
7733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7734 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7735 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7736 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7737 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7738 IEM_MC_REF_EFLAGS(pEFlags);
7739 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7740 IEM_MC_ADVANCE_RIP_AND_FINISH();
7741 IEM_MC_END();
7742 break;
7743
7744 case IEMMODE_32BIT:
7745 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
7746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7747 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7748 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7749 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7750 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7751 IEM_MC_REF_EFLAGS(pEFlags);
7752 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7753 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
7754 IEM_MC_ADVANCE_RIP_AND_FINISH();
7755 IEM_MC_END();
7756 break;
7757
7758 case IEMMODE_64BIT:
7759 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
7760 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7761 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7762 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7763 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7764 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7765 IEM_MC_REF_EFLAGS(pEFlags);
7766 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7767 IEM_MC_ADVANCE_RIP_AND_FINISH();
7768 IEM_MC_END();
7769 break;
7770
7771 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7772 }
7773 }
7774 else
7775 {
7776 /* memory */
7777 switch (pVCpu->iem.s.enmEffOpSize)
7778 {
7779 case IEMMODE_16BIT:
7780 IEM_MC_BEGIN(3, 3, 0, 0);
7781 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7782 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7783
7784 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7786
7787 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
7788 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7789 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7790
7791 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7792 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7793 IEM_MC_FETCH_EFLAGS(EFlags);
7794 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7795
7796 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
7797 IEM_MC_COMMIT_EFLAGS(EFlags);
7798 IEM_MC_ADVANCE_RIP_AND_FINISH();
7799 IEM_MC_END();
7800 break;
7801
7802 case IEMMODE_32BIT:
7803 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
7804 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7805 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7806
7807 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7809
7810 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
7811 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7812 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7813
7814 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7815 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7816 IEM_MC_FETCH_EFLAGS(EFlags);
7817 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7818
7819 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
7820 IEM_MC_COMMIT_EFLAGS(EFlags);
7821 IEM_MC_ADVANCE_RIP_AND_FINISH();
7822 IEM_MC_END();
7823 break;
7824
7825 case IEMMODE_64BIT:
7826 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
7827 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7828 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7829
7830 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7832
7833 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
7834 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7835 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7836
7837 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7838 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7839 IEM_MC_FETCH_EFLAGS(EFlags);
7840 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7841
7842 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
7843 IEM_MC_COMMIT_EFLAGS(EFlags);
7844 IEM_MC_ADVANCE_RIP_AND_FINISH();
7845 IEM_MC_END();
7846 break;
7847
7848 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7849 }
7850 }
7851}
7852
7853
7854/**
7855 * @opcode 0xc2
7856 */
7857FNIEMOP_DEF(iemOp_retn_Iw)
7858{
7859 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
7860 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7861 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7863 switch (pVCpu->iem.s.enmEffOpSize)
7864 {
7865 case IEMMODE_16BIT:
7866 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_16, u16Imm);
7867 case IEMMODE_32BIT:
7868 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_32, u16Imm);
7869 case IEMMODE_64BIT:
7870 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_64, u16Imm);
7871 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7872 }
7873}
7874
7875
7876/**
7877 * @opcode 0xc3 - retn: plain near return.
7878 */
7879FNIEMOP_DEF(iemOp_retn)
7880{
7881 IEMOP_MNEMONIC(retn, "retn");
7882 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7884 switch (pVCpu->iem.s.enmEffOpSize)
7885 {
7886 /* Deferred to a C implementation variant per effective operand size. */
7887 case IEMMODE_16BIT:
7888 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_16);
7889 case IEMMODE_32BIT:
7890 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_32);
7891 case IEMMODE_64BIT:
7892 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_64);
7893 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7894 }
7895}
7895
7896
7897/**
7898 * @opcode 0xc4 - les Gv,Mp in legacy modes / three-byte VEX prefix escape otherwise.
7899 */
7900FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
7901{
7902 /* The LES instruction is invalid in 64-bit mode. In legacy and
7903 compatibility mode it is invalid with MOD=3.
7904 The use as a VEX prefix is made possible by assigning the inverted
7905 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
7906 outside of 64-bit mode. VEX is not available in real or v86 mode. */
7907 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7908 if ( IEM_IS_64BIT_CODE(pVCpu)
7909 || IEM_IS_MODRM_REG_MODE(bRm) )
7910 {
7911 IEMOP_MNEMONIC(vex3_prefix, "vex3");
7912 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
7913 {
7914 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
7915 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
7916 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
7917 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
7918 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
7919 if ((bVex2 & 0x80 /* VEX.W */) && IEM_IS_64BIT_CODE(pVCpu))
7920 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
7921 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8; /* inverted VEX.R */
7922 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8; /* inverted VEX.X */
7923 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8; /* inverted VEX.B */
7924 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf; /* inverted VEX.vvvv */
7925 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1; /* VEX.L */
7926 pVCpu->iem.s.idxPrefix = bVex2 & 0x3; /* VEX.pp implied prefix */
7927
7928 switch (bRm & 0x1f) /* VEX.mmmmm selects the opcode map. */
7929 {
7930 case 1: /* 0x0f lead opcode byte. */
7931#ifdef IEM_WITH_VEX
7932 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7933#else
7934 IEMOP_BITCH_ABOUT_STUB();
7935 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7936#endif
7937
7938 case 2: /* 0x0f 0x38 lead opcode bytes. */
7939#ifdef IEM_WITH_VEX
7940 return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7941#else
7942 IEMOP_BITCH_ABOUT_STUB();
7943 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7944#endif
7945
7946 case 3: /* 0x0f 0x3a lead opcode bytes. */
7947#ifdef IEM_WITH_VEX
7948 return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7949#else
7950 IEMOP_BITCH_ABOUT_STUB();
7951 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7952#endif
7953
7954 default:
7955 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
7956 IEMOP_RAISE_INVALID_OPCODE_RET();
7957 }
7958 }
7959 Log(("VEX3: VEX support disabled!\n"));
7960 IEMOP_RAISE_INVALID_OPCODE_RET();
7961 }
7962
7963 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
7964 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
7965}
7966
7967
7968/**
7969 * @opcode 0xc5 - lds Gv,Mp in legacy modes / two-byte VEX prefix escape otherwise.
7970 */
7971FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
7972{
7973 /* The LDS instruction is invalid in 64-bit mode. In legacy and
7974 compatibility mode it is invalid with MOD=3.
7975 The use as a VEX prefix is made possible by assigning the inverted
7976 REX.R to the top MOD bit, and the top bit in the inverted register
7977 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
7978 to accessing registers 0..7 in this VEX form. */
7979 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7980 if ( IEM_IS_64BIT_CODE(pVCpu)
7981 || IEM_IS_MODRM_REG_MODE(bRm))
7982 {
7983 IEMOP_MNEMONIC(vex2_prefix, "vex2");
7984 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
7985 {
7986 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
7987 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
7988 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
7989 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
7990 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8; /* inverted VEX.R */
7991 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf; /* inverted VEX.vvvv */
7992 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1; /* VEX.L */
7993 pVCpu->iem.s.idxPrefix = bRm & 0x3; /* VEX.pp implied prefix */
7994
7995#ifdef IEM_WITH_VEX
7996 /* Two-byte VEX always implies the 0x0f opcode map. */
7997 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7998#else
7999 IEMOP_BITCH_ABOUT_STUB();
8000 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8001#endif
8002 }
8003
8004 /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
8005 Log(("VEX2: VEX support disabled!\n"));
8006 IEMOP_RAISE_INVALID_OPCODE_RET();
8007 }
8008
8009 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
8010 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
8011}
8011
8012
8013/**
8014 * @opcode 0xc6 - group 11: mov Eb,Ib (/0 is the only valid encoding).
8015 */
8016FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
8017{
8018 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8019 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
8020 IEMOP_RAISE_INVALID_OPCODE_RET();
8021 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
8022
8023 if (IEM_IS_MODRM_REG_MODE(bRm))
8024 {
8025 /* register access */
8026 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8027 IEM_MC_BEGIN(0, 0, 0, 0);
8028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8029 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
8030 IEM_MC_ADVANCE_RIP_AND_FINISH();
8031 IEM_MC_END();
8032 }
8033 else
8034 {
8035 /* memory access - effective address is decoded before the immediate byte. */
8036 IEM_MC_BEGIN(0, 1, 0, 0);
8037 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8038 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8039 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8041 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
8042 IEM_MC_ADVANCE_RIP_AND_FINISH();
8043 IEM_MC_END();
8044 }
8045}
8046
8047
8048/**
8049 * @opcode 0xc7 - group 11: mov Ev,Iz (/0 is the only valid encoding).
8050 */
8051FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
8052{
8053 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8054 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
8055 IEMOP_RAISE_INVALID_OPCODE_RET();
8056 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
8057
8058 if (IEM_IS_MODRM_REG_MODE(bRm))
8059 {
8060 /* register access */
8061 switch (pVCpu->iem.s.enmEffOpSize)
8062 {
8063 case IEMMODE_16BIT:
8064 IEM_MC_BEGIN(0, 0, 0, 0);
8065 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8066 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8067 IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
8068 IEM_MC_ADVANCE_RIP_AND_FINISH();
8069 IEM_MC_END();
8070 break;
8071
8072 case IEMMODE_32BIT:
8073 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8074 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8076 IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
8077 IEM_MC_ADVANCE_RIP_AND_FINISH();
8078 IEM_MC_END();
8079 break;
8080
8081 case IEMMODE_64BIT:
8082 /* 64-bit form takes a 32-bit immediate that is sign-extended. */
8083 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
8084 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8086 IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
8087 IEM_MC_ADVANCE_RIP_AND_FINISH();
8088 IEM_MC_END();
8089 break;
8090
8091 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8092 }
8093 }
8094 else
8095 {
8096 /* memory access - effective address is decoded before the immediate. */
8097 switch (pVCpu->iem.s.enmEffOpSize)
8098 {
8099 case IEMMODE_16BIT:
8100 IEM_MC_BEGIN(0, 1, 0, 0);
8101 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8102 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
8103 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8105 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
8106 IEM_MC_ADVANCE_RIP_AND_FINISH();
8107 IEM_MC_END();
8108 break;
8109
8110 case IEMMODE_32BIT:
8111 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8112 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8113 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8114 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8116 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
8117 IEM_MC_ADVANCE_RIP_AND_FINISH();
8118 IEM_MC_END();
8119 break;
8120
8121 case IEMMODE_64BIT:
8122 /* 64-bit form takes a 32-bit immediate that is sign-extended. */
8123 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
8124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8125 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8126 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8128 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
8129 IEM_MC_ADVANCE_RIP_AND_FINISH();
8130 IEM_MC_END();
8131 break;
8132
8133 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8134 }
8135 }
8136}
8135
8136
8137
8138
8139/**
8140 * @opcode 0xc8 - enter Iw,Ib: create stack frame of Iw bytes, nesting level Ib.
8141 */
8142FNIEMOP_DEF(iemOp_enter_Iw_Ib)
8143{
8144 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
8145 IEMOP_HLP_MIN_186();
8146 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8147 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
8148 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
8149 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8150 IEM_MC_DEFER_TO_CIMPL_3_RET(0, iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
8151}
8152
8153
8154/**
8155 * @opcode 0xc9 - leave: tear down the stack frame set up by enter.
8156 */
8157FNIEMOP_DEF(iemOp_leave)
8158{
8159 IEMOP_MNEMONIC(leave, "leave");
8160 IEMOP_HLP_MIN_186();
8161 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8163 IEM_MC_DEFER_TO_CIMPL_1_RET(0, iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
8164}
8165
8166
8167/**
8168 * @opcode 0xca - retf Iw: far return, then release Iw bytes of stack arguments.
8169 */
8170FNIEMOP_DEF(iemOp_retf_Iw)
8171{
8172 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
8173 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8175 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE,
8176 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
8177}
8178
8179
8180/**
8181 * @opcode 0xcb - retf: far return (same C worker as 0xca, with a zero pop count).
8182 */
8183FNIEMOP_DEF(iemOp_retf)
8184{
8185 IEMOP_MNEMONIC(retf, "retf");
8186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8187 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE,
8188 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
8189}
8190
8191
8192/**
8193 * @opcode 0xcc - int3: breakpoint trap, dispatched as interrupt vector 3 (\#BP).
8194 */
8195FNIEMOP_DEF(iemOp_int3)
8196{
8197 IEMOP_MNEMONIC(int3, "int3");
8198 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8199 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
8200 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
8201 iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
8202}
8203
8204
8205/**
8206 * @opcode 0xcd - int Ib: software interrupt through the given vector.
8207 */
8208FNIEMOP_DEF(iemOp_int_Ib)
8209{
8210 IEMOP_MNEMONIC(int_Ib, "int Ib");
8211 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
8212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8213 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
8214 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
8215 iemCImpl_int, u8Int, IEMINT_INTN);
8216}
8217
8218
8219/**
8220 * @opcode 0xce - into: raise \#OF interrupt if the overflow flag is set; invalid in 64-bit mode.
8221 */
8222FNIEMOP_DEF(iemOp_into)
8223{
8224 IEMOP_MNEMONIC(into, "into");
8225 IEMOP_HLP_NO_64BIT();
8226 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_CONDITIONAL
8227 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
8228 iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
8229}
8230
8231
8232/**
8233 * @opcode 0xcf - iret: interrupt return; checks pending IRQs before executing.
8234 */
8235FNIEMOP_DEF(iemOp_iret)
8236{
8237 IEMOP_MNEMONIC(iret, "iret");
8238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8239 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
8240 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
8241 iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
8242}
8243
8244
8245/**
8246 * @opcode 0xd0 - group 2: rotate/shift Eb by a constant count of 1; /6 is invalid.
8247 */
8248FNIEMOP_DEF(iemOp_Grp2_Eb_1)
8249{
8250 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8251 PCIEMOPSHIFTSIZES pImpl;
8252 switch (IEM_GET_MODRM_REG_8(bRm)) /* /r selects the operation. */
8253 {
8254 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
8255 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
8256 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
8257 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
8258 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
8259 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
8260 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
8261 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8262 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
8263 }
8264 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8265
8266 if (IEM_IS_MODRM_REG_MODE(bRm))
8267 {
8268 /* register */
8269 IEM_MC_BEGIN(3, 0, 0, 0);
8270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8271 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8272 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
8273 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8274 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8275 IEM_MC_REF_EFLAGS(pEFlags);
8276 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8277 IEM_MC_ADVANCE_RIP_AND_FINISH();
8278 IEM_MC_END();
8279 }
8280 else
8281 {
8282 /* memory - operand is mapped read-write and committed after the call. */
8283 IEM_MC_BEGIN(3, 3, 0, 0);
8284 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8285 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
8286 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8287 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8288 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8289
8290 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8292 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8293 IEM_MC_FETCH_EFLAGS(EFlags);
8294 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8295
8296 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
8297 IEM_MC_COMMIT_EFLAGS(EFlags);
8298 IEM_MC_ADVANCE_RIP_AND_FINISH();
8299 IEM_MC_END();
8300 }
8301}
8302
8303
8304
8305/**
8306 * @opcode 0xd1 - group 2: rotate/shift Ev by a constant count of 1; /6 is invalid.
8307 */
8308FNIEMOP_DEF(iemOp_Grp2_Ev_1)
8309{
8310 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8311 PCIEMOPSHIFTSIZES pImpl;
8312 switch (IEM_GET_MODRM_REG_8(bRm)) /* /r selects the operation. */
8313 {
8314 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
8315 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
8316 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
8317 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
8318 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
8319 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
8320 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
8321 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8322 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
8323 }
8324 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8325
8326 if (IEM_IS_MODRM_REG_MODE(bRm))
8327 {
8328 /* register */
8329 switch (pVCpu->iem.s.enmEffOpSize)
8330 {
8331 case IEMMODE_16BIT:
8332 IEM_MC_BEGIN(3, 0, 0, 0);
8333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8334 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8335 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8336 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8337 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8338 IEM_MC_REF_EFLAGS(pEFlags);
8339 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8340 IEM_MC_ADVANCE_RIP_AND_FINISH();
8341 IEM_MC_END();
8342 break;
8343
8344 case IEMMODE_32BIT:
8345 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
8346 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8347 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8348 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8349 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8350 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8351 IEM_MC_REF_EFLAGS(pEFlags);
8352 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8353 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); /* 32-bit writes zero bits 63:32 */
8354 IEM_MC_ADVANCE_RIP_AND_FINISH();
8355 IEM_MC_END();
8356 break;
8357
8358 case IEMMODE_64BIT:
8359 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
8360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8361 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8362 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8363 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8364 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8365 IEM_MC_REF_EFLAGS(pEFlags);
8366 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8367 IEM_MC_ADVANCE_RIP_AND_FINISH();
8368 IEM_MC_END();
8369 break;
8370
8371 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8372 }
8373 }
8374 else
8375 {
8376 /* memory - operand is mapped read-write and committed after the call. */
8377 switch (pVCpu->iem.s.enmEffOpSize)
8378 {
8379 case IEMMODE_16BIT:
8380 IEM_MC_BEGIN(3, 3, 0, 0);
8381 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8382 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8383 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8384 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8385 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8386
8387 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8389 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8390 IEM_MC_FETCH_EFLAGS(EFlags);
8391 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8392
8393 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
8394 IEM_MC_COMMIT_EFLAGS(EFlags);
8395 IEM_MC_ADVANCE_RIP_AND_FINISH();
8396 IEM_MC_END();
8397 break;
8398
8399 case IEMMODE_32BIT:
8400 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
8401 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8402 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8403 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8404 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8405 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8406
8407 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8409 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8410 IEM_MC_FETCH_EFLAGS(EFlags);
8411 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8412
8413 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
8414 IEM_MC_COMMIT_EFLAGS(EFlags);
8415 IEM_MC_ADVANCE_RIP_AND_FINISH();
8416 IEM_MC_END();
8417 break;
8418
8419 case IEMMODE_64BIT:
8420 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
8421 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8422 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8423 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8424 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8425 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8426
8427 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8429 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8430 IEM_MC_FETCH_EFLAGS(EFlags);
8431 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8432
8433 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
8434 IEM_MC_COMMIT_EFLAGS(EFlags);
8435 IEM_MC_ADVANCE_RIP_AND_FINISH();
8436 IEM_MC_END();
8437 break;
8438
8439 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8440 }
8441 }
8442}
8443
8444
8445/**
8446 * @opcode 0xd2 - group 2: rotate/shift Eb by the count in CL; /6 is invalid.
8447 */
8448FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
8449{
8450 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8451 PCIEMOPSHIFTSIZES pImpl;
8452 switch (IEM_GET_MODRM_REG_8(bRm)) /* /r selects the operation. */
8453 {
8454 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
8455 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
8456 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
8457 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
8458 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
8459 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
8460 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
8461 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8462 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
8463 }
8464 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8465
8466 if (IEM_IS_MODRM_REG_MODE(bRm))
8467 {
8468 /* register - shift count is fetched from CL at runtime. */
8469 IEM_MC_BEGIN(3, 0, 0, 0);
8470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8471 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8472 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8473 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8474 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8475 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8476 IEM_MC_REF_EFLAGS(pEFlags);
8477 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8478 IEM_MC_ADVANCE_RIP_AND_FINISH();
8479 IEM_MC_END();
8480 }
8481 else
8482 {
8483 /* memory - operand is mapped read-write and committed after the call. */
8484 IEM_MC_BEGIN(3, 3, 0, 0);
8485 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8486 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8487 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8488 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8489 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8490
8491 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8493 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8494 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8495 IEM_MC_FETCH_EFLAGS(EFlags);
8496 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8497
8498 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
8499 IEM_MC_COMMIT_EFLAGS(EFlags);
8500 IEM_MC_ADVANCE_RIP_AND_FINISH();
8501 IEM_MC_END();
8502 }
8503}
8504
8505
8506/**
8507 * @opcode 0xd3 - group 2: rotate/shift Ev by the count in CL; /6 is invalid.
8508 */
8509FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
8510{
8511 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8512 PCIEMOPSHIFTSIZES pImpl;
8513 switch (IEM_GET_MODRM_REG_8(bRm)) /* /r selects the operation. */
8514 {
8515 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
8516 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
8517 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
8518 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
8519 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
8520 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
8521 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
8522 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8523 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8524 }
8525 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8526
8527 if (IEM_IS_MODRM_REG_MODE(bRm))
8528 {
8529 /* register - shift count is fetched from CL at runtime. */
8530 switch (pVCpu->iem.s.enmEffOpSize)
8531 {
8532 case IEMMODE_16BIT:
8533 IEM_MC_BEGIN(3, 0, 0, 0);
8534 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8535 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8536 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8537 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8538 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8539 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8540 IEM_MC_REF_EFLAGS(pEFlags);
8541 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8542 IEM_MC_ADVANCE_RIP_AND_FINISH();
8543 IEM_MC_END();
8544 break;
8545
8546 case IEMMODE_32BIT:
8547 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
8548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8549 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8550 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8551 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8552 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8553 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8554 IEM_MC_REF_EFLAGS(pEFlags);
8555 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8556 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); /* 32-bit writes zero bits 63:32 */
8557 IEM_MC_ADVANCE_RIP_AND_FINISH();
8558 IEM_MC_END();
8559 break;
8560
8561 case IEMMODE_64BIT:
8562 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
8563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8564 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8565 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8566 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8567 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8568 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8569 IEM_MC_REF_EFLAGS(pEFlags);
8570 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8571 IEM_MC_ADVANCE_RIP_AND_FINISH();
8572 IEM_MC_END();
8573 break;
8574
8575 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8576 }
8577 }
8578 else
8579 {
8580 /* memory - operand is mapped read-write and committed after the call. */
8581 switch (pVCpu->iem.s.enmEffOpSize)
8582 {
8583 case IEMMODE_16BIT:
8584 IEM_MC_BEGIN(3, 3, 0, 0);
8585 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8586 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8587 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8588 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8589 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8590
8591 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8593 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8594 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8595 IEM_MC_FETCH_EFLAGS(EFlags);
8596 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8597
8598 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
8599 IEM_MC_COMMIT_EFLAGS(EFlags);
8600 IEM_MC_ADVANCE_RIP_AND_FINISH();
8601 IEM_MC_END();
8602 break;
8603
8604 case IEMMODE_32BIT:
8605 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
8606 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8607 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8608 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8609 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8610 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8611
8612 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8614 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8615 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8616 IEM_MC_FETCH_EFLAGS(EFlags);
8617 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8618
8619 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
8620 IEM_MC_COMMIT_EFLAGS(EFlags);
8621 IEM_MC_ADVANCE_RIP_AND_FINISH();
8622 IEM_MC_END();
8623 break;
8624
8625 case IEMMODE_64BIT:
8626 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
8627 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8628 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8629 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8630 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8631 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8632
8633 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8635 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8636 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8637 IEM_MC_FETCH_EFLAGS(EFlags);
8638 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8639
8640 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
8641 IEM_MC_COMMIT_EFLAGS(EFlags);
8642 IEM_MC_ADVANCE_RIP_AND_FINISH();
8643 IEM_MC_END();
8644 break;
8645
8646 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8647 }
8648 }
8649}
8650
8651/**
8652 * @opcode 0xd4 - aam Ib: ASCII adjust AX after multiply; raises \#DE for a zero immediate.
8653 */
8654FNIEMOP_DEF(iemOp_aam_Ib)
8655{
8656 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
8657 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
8658 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8659 IEMOP_HLP_NO_64BIT();
8660 if (!bImm) /* aam 0 divides by zero -> #DE. */
8661 IEMOP_RAISE_DIVIDE_ERROR_RET();
8662 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aam, bImm);
8663}
8664
8665
8666/**
8667 * @opcode 0xd5 - aad Ib: ASCII adjust AX before division; invalid in 64-bit mode.
8668 */
8669FNIEMOP_DEF(iemOp_aad_Ib)
8670{
8671 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
8672 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
8673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8674 IEMOP_HLP_NO_64BIT();
8675 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aad, bImm);
8676}
8677
8678
8679/**
8680 * @opcode 0xd6 - salc (undocumented): AL = CF ? 0xff : 0x00; invalid in 64-bit mode.
8681 */
8682FNIEMOP_DEF(iemOp_salc)
8683{
8684 IEMOP_MNEMONIC(salc, "salc");
8685 IEMOP_HLP_NO_64BIT();
8686
8687 IEM_MC_BEGIN(0, 0, 0, 0);
8688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8689 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8690 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
8691 } IEM_MC_ELSE() {
8692 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
8693 } IEM_MC_ENDIF();
8694 IEM_MC_ADVANCE_RIP_AND_FINISH();
8695 IEM_MC_END();
8696}
8697
8698
8699/**
8700 * @opcode 0xd7 - xlat: AL = [seg:rBX + zero-extended AL], per effective address size.
8701 */
8702FNIEMOP_DEF(iemOp_xlat)
8703{
8704 IEMOP_MNEMONIC(xlat, "xlat");
8705 switch (pVCpu->iem.s.enmEffAddrMode)
8706 {
8707 case IEMMODE_16BIT:
8708 IEM_MC_BEGIN(2, 0, 0, 0);
8709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8710 IEM_MC_LOCAL(uint8_t, u8Tmp);
8711 IEM_MC_LOCAL(uint16_t, u16Addr);
8712 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX); /* AL is the table index */
8713 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX); /* BX is the table base */
8714 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
8715 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
8716 IEM_MC_ADVANCE_RIP_AND_FINISH();
8717 IEM_MC_END();
8718 break;
8719
8720 case IEMMODE_32BIT:
8721 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
8722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8723 IEM_MC_LOCAL(uint8_t, u8Tmp);
8724 IEM_MC_LOCAL(uint32_t, u32Addr);
8725 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
8726 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
8727 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
8728 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
8729 IEM_MC_ADVANCE_RIP_AND_FINISH();
8730 IEM_MC_END();
8731 break;
8732
8733 case IEMMODE_64BIT:
8734 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
8735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8736 IEM_MC_LOCAL(uint8_t, u8Tmp);
8737 IEM_MC_LOCAL(uint64_t, u64Addr);
8738 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
8739 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
8740 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
8741 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
8742 IEM_MC_ADVANCE_RIP_AND_FINISH();
8743 IEM_MC_END();
8744 break;
8745
8746 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8747 }
8748}
8749
8750
8751/**
8752 * Common worker for FPU instructions working on ST0 and STn, and storing the
8753 * result in ST0.
8754 *
8755 * @param bRm Mod R/M byte; the R/M field selects STn.
8756 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8757 */
8758FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
8759{
8760 IEM_MC_BEGIN(3, 1, 0, 0);
8761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8762 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8763 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8764 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8765 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8766
8767 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8768 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8769 IEM_MC_PREPARE_FPU_USAGE();
8770 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8771 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
8772 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode); /* result goes to ST0 */
8773 } IEM_MC_ELSE() {
8774 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode); /* either register empty */
8775 } IEM_MC_ENDIF();
8776 IEM_MC_ADVANCE_RIP_AND_FINISH();
8777
8778 IEM_MC_END();
8779}
8780
8781
8782/**
8783 * Common worker for FPU instructions working on ST0 and STn, and only affecting
8784 * flags.
8785 *
8786 * @param bRm Mod R/M byte.
8787 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8788 */
8789FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8790{
8791 IEM_MC_BEGIN(3, 1, 0, 0);
8792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8793 IEM_MC_LOCAL(uint16_t, u16Fsw);
8794 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8795 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8796 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8797
8798 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8799 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8800 IEM_MC_PREPARE_FPU_USAGE();
8801 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8802 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8803 IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
8804 } IEM_MC_ELSE() {
8805 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
8806 } IEM_MC_ENDIF();
8807 IEM_MC_ADVANCE_RIP_AND_FINISH();
8808
8809 IEM_MC_END();
8810}
8811
8812
8813/**
8814 * Common worker for FPU instructions working on ST0 and STn, only affecting
8815 * flags, and popping when done.
8816 *
8817 * @param bRm Mod R/M byte.
8818 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8819 */
8820FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8821{
8822 IEM_MC_BEGIN(3, 1, 0, 0);
8823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8824 IEM_MC_LOCAL(uint16_t, u16Fsw);
8825 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8826 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8827 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8828
8829 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8830 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8831 IEM_MC_PREPARE_FPU_USAGE();
8832 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8833 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8834 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
8835 } IEM_MC_ELSE() {
8836 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
8837 } IEM_MC_ENDIF();
8838 IEM_MC_ADVANCE_RIP_AND_FINISH();
8839
8840 IEM_MC_END();
8841}
8842
8843
/** Opcode 0xd8 11/0 - fadd st0,stN; result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xd8 11/1 - fmul st0,stN; result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xd8 11/2 - fcom st0,stN; only updates FSW. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/3 - fcomp st0,stN; same AIMPL as fcom, but pops afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/4 - fsub st0,stN; result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xd8 11/5 - fsubr st0,stN (reversed operands); result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xd8 11/6 - fdiv st0,stN; result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}


/** Opcode 0xd8 11/7 - fdivr st0,stN (reversed operands); result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
8906
8907
8908/**
8909 * Common worker for FPU instructions working on ST0 and an m32r, and storing
8910 * the result in ST0.
8911 *
8912 * @param bRm Mod R/M byte.
8913 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8914 */
8915FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
8916{
8917 IEM_MC_BEGIN(3, 3, 0, 0);
8918 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8919 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8920 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
8921 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8922 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8923 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
8924
8925 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8927
8928 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8929 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8930 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8931
8932 IEM_MC_PREPARE_FPU_USAGE();
8933 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
8934 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
8935 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
8936 } IEM_MC_ELSE() {
8937 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
8938 } IEM_MC_ENDIF();
8939 IEM_MC_ADVANCE_RIP_AND_FINISH();
8940
8941 IEM_MC_END();
8942}
8943
8944
/** Opcode 0xd8 !11/0 - fadd st0,m32r; result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1 - fmul st0,m32r; result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
8959
8960
/** Opcode 0xd8 !11/2 - fcom st0,m32r: compares ST0 with a 32-bit real memory
 *  operand, updating only FSW (incl. FPU data pointer via the _MEM_OP forms). */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8992
8993
/** Opcode 0xd8 !11/3 - fcomp st0,m32r: like fcom st0,m32r (same AIMPL), but
 *  pops the register stack after updating FSW. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9025
9026
/** Opcode 0xd8 !11/4 - fsub st0,m32r; result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5 - fsubr st0,m32r (reversed operands); result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6 - fdiv st0,m32r; result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7 - fdivr st0,m32r (reversed operands); result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
9057
9058
9059/**
9060 * @opcode 0xd8
9061 */
9062FNIEMOP_DEF(iemOp_EscF0)
9063{
9064 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9065 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
9066
9067 if (IEM_IS_MODRM_REG_MODE(bRm))
9068 {
9069 switch (IEM_GET_MODRM_REG_8(bRm))
9070 {
9071 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
9072 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
9073 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
9074 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
9075 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
9076 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
9077 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
9078 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
9079 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9080 }
9081 }
9082 else
9083 {
9084 switch (IEM_GET_MODRM_REG_8(bRm))
9085 {
9086 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
9087 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
9088 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
9089 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
9090 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
9091 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
9092 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
9093 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
9094 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9095 }
9096 }
9097}
9098
9099
/** Opcode 0xd9 /0 mem32real - fld m32r: converts a 32-bit real from memory to
 *  80-bit format and pushes it onto the FPU register stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* The push goes into ST7 (relative to current TOP); it must be empty,
       otherwise record a stack (push) overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9130
9131
/** Opcode 0xd9 !11/2 mem32real - fst m32r: stores ST0 to memory as a 32-bit
 *  real; on an empty ST0 with IM masked a negative QNaN is written instead. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for write before doing the conversion so the store
       can be committed conditionally on the resulting FSW. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: if invalid-operation is masked, write a negative QNaN. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9165
9166
/** Opcode 0xd9 !11/3 - fstp m32r: like fst m32r, but pops the register stack
 *  after the store / underflow handling. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: if invalid-operation is masked, write a negative QNaN. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9200
9201
/** Opcode 0xd9 !11/4 - fldenv m14/28byte: loads the FPU environment from
 *  memory; deferred to the C implementation (iemCImpl_fldenv), which gets the
 *  effective operand size, segment and address. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
9219
9220
9221/** Opcode 0xd9 !11/5 */
9222FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
9223{
9224 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
9225 IEM_MC_BEGIN(1, 1, 0, 0);
9226 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9227 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9228
9229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9230 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9231 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9232
9233 IEM_MC_ARG(uint16_t, u16Fsw, 0);
9234 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9235
9236 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, iemCImpl_fldcw, u16Fsw);
9237 IEM_MC_END();
9238}
9239
9240
/** Opcode 0xd9 !11/6 - fnstenv m14/m28byte: stores the FPU environment to
 *  memory; deferred to the C implementation (iemCImpl_fnstenv). */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    /* Read-only actualization: the guest FPU state is only read here. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
9258
9259
/** Opcode 0xd9 !11/7 - fnstcw m2byte: stores the current FPU control word to
 *  memory. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9276
9277
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?. - fnop: performs no operation but
 *  still updates the FPU opcode/IP state and may raise \#NM / pending \#MF. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9293
9294
/** Opcode 0xd9 11/0 stN - fld stN: pushes a copy of ST(i) onto the register
 *  stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Source ST(i) must hold a value; push a copy, else push-underflow. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9319
9320
/** Opcode 0xd9 11/3 stN - fxch stN: exchanges ST0 with ST(i); the underflow
 *  case is complex enough to be deferred to a C implementation. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        /* Swap: old ST(i) value becomes the ST0 result, old ST0 goes to ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9349
9350
/** Opcode 0xd9 11/4, 0xdd 11/2. - fstp st0,stN: copies ST0 to ST(i) and pops;
 *  the iDstReg == 0 special case is effectively FFREEP ST0. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        /* Destination is ST0 itself: just pop (no copy needed). */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* General case: store ST0 into ST(i), then pop. */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9397
9398
9399/**
9400 * Common worker for FPU instructions working on ST0 and replaces it with the
9401 * result, i.e. unary operators.
9402 *
9403 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9404 */
9405FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
9406{
9407 IEM_MC_BEGIN(2, 1, 0, 0);
9408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9409 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9410 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9411 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
9412
9413 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9414 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9415 IEM_MC_PREPARE_FPU_USAGE();
9416 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
9417 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
9418 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9419 } IEM_MC_ELSE() {
9420 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9421 } IEM_MC_ENDIF();
9422 IEM_MC_ADVANCE_RIP_AND_FINISH();
9423
9424 IEM_MC_END();
9425}
9426
9427
/** Opcode 0xd9 0xe0 - fchs st0: negates ST0 in place. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1 - fabs st0: replaces ST0 with its absolute value. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
9442
9443
/** Opcode 0xd9 0xe4 - ftst st0: compares ST0 against 0.0, updating only FSW. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9467
9468
/** Opcode 0xd9 0xe5 - fxam st0: classifies the contents of ST0 into the FSW
 *  condition code bits. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Note: no emptiness check here, unlike the other st0 workers - the
       register is referenced unconditionally (FXAM classifies empty too). */
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9489
9490
9491/**
9492 * Common worker for FPU instructions pushing a constant onto the FPU stack.
9493 *
9494 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9495 */
9496FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
9497{
9498 IEM_MC_BEGIN(1, 1, 0, 0);
9499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9500 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9501 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9502
9503 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9504 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9505 IEM_MC_PREPARE_FPU_USAGE();
9506 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
9507 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
9508 IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
9509 } IEM_MC_ELSE() {
9510 IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
9511 } IEM_MC_ENDIF();
9512 IEM_MC_ADVANCE_RIP_AND_FINISH();
9513
9514 IEM_MC_END();
9515}
9516
9517
/** Opcode 0xd9 0xe8 - fld1: pushes +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9 - fldl2t: pushes log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea - fldl2e: pushes log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb - fldpi: pushes pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec - fldlg2: pushes log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed - fldln2: pushes ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee - fldz: pushes +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
9570
9571
/** Opcode 0xd9 0xf0 - f2xm1 st0: computes 2^ST0 - 1, replacing ST0.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
9585
9586
9587/**
9588 * Common worker for FPU instructions working on STn and ST0, storing the result
9589 * in STn, and popping the stack unless IE, DE or ZE was raised.
9590 *
9591 * @param bRm Mod R/M byte.
9592 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9593 */
9594FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
9595{
9596 IEM_MC_BEGIN(3, 1, 0, 0);
9597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9598 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9599 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9600 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9601 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9602
9603 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9604 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9605
9606 IEM_MC_PREPARE_FPU_USAGE();
9607 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
9608 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
9609 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
9610 } IEM_MC_ELSE() {
9611 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
9612 } IEM_MC_ENDIF();
9613 IEM_MC_ADVANCE_RIP_AND_FINISH();
9614
9615 IEM_MC_END();
9616}
9617
9618
/** Opcode 0xd9 0xf1 - fyl2x st1,st0: result stored in ST1, then pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
9625
9626
9627/**
9628 * Common worker for FPU instructions working on ST0 and having two outputs, one
9629 * replacing ST0 and one pushed onto the stack.
9630 *
9631 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9632 */
9633FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
9634{
9635 IEM_MC_BEGIN(2, 1, 0, 0);
9636 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9637 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
9638 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
9639 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
9640
9641 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9642 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9643 IEM_MC_PREPARE_FPU_USAGE();
9644 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
9645 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
9646 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
9647 } IEM_MC_ELSE() {
9648 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
9649 } IEM_MC_ENDIF();
9650 IEM_MC_ADVANCE_RIP_AND_FINISH();
9651
9652 IEM_MC_END();
9653}
9654
9655
/** Opcode 0xd9 0xf2 - fptan st0: replaces ST0 and pushes a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3 - fpatan st1,st0: result stored in ST1, then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4 - fxtract st0: replaces ST0 and pushes a second result. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5 - fprem1 st0,st1: result stored in ST0 (no pop). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
9686
9687
/** Opcode 0xd9 0xf6 - fdecstp: decrements the FPU stack TOP pointer without
 *  touching register contents or tag words. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9708
9709
/** Opcode 0xd9 0xf7. FINCSTP - increment the FPU stack top pointer. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    /* FSW update with constant 0 clears C0/C2/C3 as per the note above. */
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9730
9731
/** Opcode 0xd9 0xf8. FPREM - ST(0) by ST(1), result stored in ST(0). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    /* Common "st0 op stN, store in st0" worker with iStReg=1. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
9738
9739
/** Opcode 0xd9 0xf9. FYL2XP1 - operates on ST(1) and ST(0), pops when done. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    /* Common "stN op st0, store in stN, then pop" worker with iStReg=1. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
9746
9747
/** Opcode 0xd9 0xfa. FSQRT - unary operation on ST(0). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    /* Common unary "st0 in-place" worker. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
9754
9755
/** Opcode 0xd9 0xfb. FSINCOS - replaces ST(0) and pushes a second result. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    /* Defers to the common "replace st0 and push" FPU worker. */
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
9762
9763
/** Opcode 0xd9 0xfc. FRNDINT - unary operation on ST(0). */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    /* Common unary "st0 in-place" worker. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
9770
9771
/** Opcode 0xd9 0xfd. FSCALE - ST(0) by ST(1), result stored in ST(0). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    /* Common "st0 op stN, store in st0" worker with iStReg=1. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
9778
9779
/** Opcode 0xd9 0xfe. FSIN - unary operation on ST(0). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    /* Common unary "st0 in-place" worker. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
9786
9787
/** Opcode 0xd9 0xff. FCOS - unary operation on ST(0). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    /* Common unary "st0 in-place" worker. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
9794
9795
/** Used by iemOp_EscF1.
 * Dispatch table for the register-form 0xd9 sub-opcodes 0xe0 thru 0xff,
 * indexed by (bRm - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
9832
9833
/**
 * @opcode 0xd9
 *
 * FPU escape opcode 0xd9: records the FPU opcode word and dispatches on the
 * ModR/M byte to the register-form or memory-form handlers.
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* FOP word: ModR/M byte in the low byte, low 3 bits of the opcode above it. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* 0xe0..0xff are handled via the dispatch table. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r,  bRm);
            case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv,    bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw,     bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv,   bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw,    bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9878
9879
/** Opcode 0xda 11/0. FCMOVB - copy ST(n) to ST(0) if CF is set. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both the source register and ST(0) must be non-empty, else underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9904
9905
/** Opcode 0xda 11/1. FCMOVE - copy ST(n) to ST(0) if ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both the source register and ST(0) must be non-empty, else underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9930
9931
/** Opcode 0xda 11/2. FCMOVBE - copy ST(n) to ST(0) if CF or ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both the source register and ST(0) must be non-empty, else underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9956
9957
/** Opcode 0xda 11/3. FCMOVU - copy ST(n) to ST(0) if PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both the source register and ST(0) must be non-empty, else underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9982
9983
/**
 * Common worker for FPU instructions working on ST0 and ST1, only affecting
 * flags, and popping twice when done.
 *
 * The AIMPL only produces an FSW value (no register result); the stack is
 * popped twice on success as well as on the underflow path.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(1) must be non-empty, else stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10013
10014
/** Opcode 0xda 0xe9. FUCOMPP - unordered compare ST(0) with ST(1), pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    /* Flags-only compare worker that pops the stack twice. */
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
10021
10022
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * Fetches the 32-bit signed integer operand from memory, calls the AIMPL with
 * ST(0) and the integer, and stores the result back into ST(0).  Raises FPU
 * stack underflow if ST(0) is empty.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2,  i32Val2,   2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10058
10059
/** Opcode 0xda !11/0. FIADD - add m32i to ST(0). */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    /* Common "st0 op m32i, store in st0" worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
10066
10067
/** Opcode 0xda !11/1. FIMUL - multiply ST(0) by m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    /* Common "st0 op m32i, store in st0" worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
10074
10075
/** Opcode 0xda !11/2. FICOM - compare ST(0) with m32i, flags only. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2,  i32Val2,   2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Only the FSW is updated; no register result is stored. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10107
10108
/** Opcode 0xda !11/3. FICOMP - compare ST(0) with m32i, flags only, then pop. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2,  i32Val2,   2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Same as FICOM but pops the stack afterwards (including underflow path). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10140
10141
/** Opcode 0xda !11/4. FISUB - subtract m32i from ST(0). */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    /* Common "st0 op m32i, store in st0" worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
10148
10149
/** Opcode 0xda !11/5. FISUBR - reverse subtract, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    /* Common "st0 op m32i, store in st0" worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
10156
10157
/** Opcode 0xda !11/6. FIDIV - divide ST(0) by m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    /* Common "st0 op m32i, store in st0" worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
10164
10165
/** Opcode 0xda !11/7. FIDIVR - reverse divide, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    /* Common "st0 op m32i, store in st0" worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
10172
10173
/**
 * @opcode 0xda
 *
 * FPU escape opcode 0xda: records the FPU opcode word and dispatches on the
 * ModR/M byte.  Register forms are the FCMOVcc family plus FUCOMPP; memory
 * forms operate on a 32-bit signed integer operand.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* FOP word: ModR/M byte in the low byte, low 3 bits of the opcode above it. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5:
                /* Only 0xe9 (FUCOMPP) is valid in this group. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10215
10216
/** Opcode 0xdb !11/0. FILD - load m32i and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val,  i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST(7) free, otherwise it is a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10247
10248
/** Opcode 0xdb !11/1. FISTTP - store ST(0) as m32i with truncation, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* The commit is conditional on the FSW the conversion produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): store the integer indefinite if #IE is masked. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10282
10283
/** Opcode 0xdb !11/2. FIST - store ST(0) as m32i (rounded), no pop. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* The commit is conditional on the FSW the conversion produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): store the integer indefinite if #IE is masked. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10317
10318
/** Opcode 0xdb !11/3. FISTP - store ST(0) as m32i (rounded), then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* The commit is conditional on the FSW the conversion produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): store the integer indefinite if #IE is masked. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10352
10353
/** Opcode 0xdb !11/5. FLD - load m80r and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U,            r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U,  pr80Val,    r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST(7) free, otherwise it is a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10384
10385
/** Opcode 0xdb !11/7. FSTP - store ST(0) to m80r, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U,             pr80Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* 10-byte destination; note the explicit alignment requirement. */
    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): store negative QNaN if #IE is masked. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10419
10420
/** Opcode 0xdb 11/0. FCMOVNB - copy ST(n) to ST(0) if CF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both the source register and ST(0) must be non-empty, else underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10445
10446
/** Opcode 0xdb 11/1. FCMOVNE - copy ST(n) to ST(0) if ZF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both the source register and ST(0) must be non-empty, else underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10471
10472
/** Opcode 0xdb 11/2. FCMOVNBE - copy ST(n) to ST(0) if both CF and ZF are clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both the source register and ST(0) must be non-empty, else underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10497
10498
/** Opcode 0xdb 11/3. FCMOVNU - copy ST(n) to ST(0) if PF is clear (not unordered). */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both the source register and ST(0) must be non-empty, else underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10523
10524
/** Opcode 0xdb 0xe0. FNENI - 8087 only; ignored (no-op) here apart from #NM. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10535
10536
/** Opcode 0xdb 0xe1. FNDISI - 8087 only; ignored (no-op) here apart from #NM. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10547
10548
/** Opcode 0xdb 0xe2. FNCLEX - clear FPU exception flags without checking. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Clears the exception status bits in the FSW. */
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10561
10562
/** Opcode 0xdb 0xe3. FNINIT - initialize the FPU without checking exceptions. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to the C implementation; fCheckXcpts=false is the "FN" variant. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, iemCImpl_finit, false /*fCheckXcpts*/);
}
10570
10571
/** Opcode 0xdb 0xe4. FNSETPM - 80287 only; ignored (no-op) here apart from #NM. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10582
10583
/** Opcode 0xdb 0xe5. FRSTPM - 80287XL only; raises \#UD like newer CPUs do. */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
10599
10600
10601/** Opcode 0xdb 11/5. */
10602FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
10603{
10604 IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
10605 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
10606 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
10607 0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
10608}
10609
10610
/** Opcode 0xdb 11/6. FCOMI - compare ST(0) with ST(n), sets EFLAGS. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    /* Deferred to the common fcomi/fucomi C implementation; the third argument
       packs the no-pop flag (false) with the FPU opcode word. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
10619
10620
/**
 * @opcode 0xdb
 *
 * FPU escape opcode 0xdb: records the FPU opcode word and dispatches on the
 * ModR/M byte.  Register forms cover FCMOVNcc, the FN* control instructions
 * and FUCOMI/FCOMI; memory forms cover m32i and m80r loads/stores.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* FOP word: ModR/M byte in the low byte, low 3 bits of the opcode above it. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* Group 4 is a sub-dispatch on the whole ModR/M byte. */
                switch (bRm)
                {
                    case 0xe0:  return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1:  return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2:  return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3:  return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4:  return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5:  return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6:  IEMOP_RAISE_INVALID_OPCODE_RET();
                    case 0xe7:  IEMOP_RAISE_INVALID_OPCODE_RET();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN,  bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10672
10673
10674/**
10675 * Common worker for FPU instructions working on STn and ST0, and storing the
10676 * result in STn unless IE, DE or ZE was raised.
10677 *
10678 * @param bRm Mod R/M byte.
10679 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10680 */
10681FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
10682{
10683 IEM_MC_BEGIN(3, 1, 0, 0);
10684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10685 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10686 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10687 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10688 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10689
10690 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10691 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10692
10693 IEM_MC_PREPARE_FPU_USAGE();
10694 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
10695 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
10696 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10697 } IEM_MC_ELSE() {
10698 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10699 } IEM_MC_ENDIF();
10700 IEM_MC_ADVANCE_RIP_AND_FINISH();
10701
10702 IEM_MC_END();
10703}
10704
10705
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST0 - add ST0 to ST(i), result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
10712
10713
/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST0 - multiply ST(i) by ST0, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
10720
10721
/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST0 - reversed subtract, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
10728
10729
/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST0 - subtract ST0 from ST(i), result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
10736
10737
/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST0 - reversed divide, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
10744
10745
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST0 - divide ST(i) by ST0, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
10752
10753
10754/**
10755 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
10756 * memory operand, and storing the result in ST0.
10757 *
10758 * @param bRm Mod R/M byte.
10759 * @param pfnImpl Pointer to the instruction implementation (assembly).
10760 */
10761FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
10762{
10763 IEM_MC_BEGIN(3, 3, 0, 0);
10764 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10765 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10766 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
10767 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10768 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
10769 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
10770
10771 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10773 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10774 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10775
10776 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10777 IEM_MC_PREPARE_FPU_USAGE();
10778 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
10779 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
10780 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10781 } IEM_MC_ELSE() {
10782 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10783 } IEM_MC_ENDIF();
10784 IEM_MC_ADVANCE_RIP_AND_FINISH();
10785
10786 IEM_MC_END();
10787}
10788
10789
/** Opcode 0xdc !11/0.
 * FADD m64real - add 64-bit memory operand to ST0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
10796
10797
/** Opcode 0xdc !11/1.
 * FMUL m64real - multiply ST0 by 64-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
10804
10805
/** Opcode 0xdc !11/2.
 * FCOM m64real - compare ST0 with 64-bit memory operand; only FSW is updated
 * (no register result), hence the u16Fsw output instead of an IEMFPURESULT. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no stack register to tag as underflowed, only FSW/FOP updates. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10837
10838
/** Opcode 0xdc !11/3.
 * FCOMP m64real - like FCOM m64real but pops ST0 afterwards (the _THEN_POP
 * FSW/underflow variants below). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10870
10871
/** Opcode 0xdc !11/4.
 * FSUB m64real - subtract 64-bit memory operand from ST0. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
10878
10879
/** Opcode 0xdc !11/5.
 * FSUBR m64real - reversed subtract: memory operand minus ST0, into ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
10886
10887
/** Opcode 0xdc !11/6.
 * FDIV m64real - divide ST0 by 64-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
10894
10895
/** Opcode 0xdc !11/7.
 * FDIVR m64real - reversed divide: memory operand divided by ST0, into ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
10902
10903
10904/**
10905 * @opcode 0xdc
10906 */
10907FNIEMOP_DEF(iemOp_EscF4)
10908{
10909 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10910 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
10911 if (IEM_IS_MODRM_REG_MODE(bRm))
10912 {
10913 switch (IEM_GET_MODRM_REG_8(bRm))
10914 {
10915 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
10916 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
10917 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
10918 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
10919 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
10920 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
10921 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
10922 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
10923 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10924 }
10925 }
10926 else
10927 {
10928 switch (IEM_GET_MODRM_REG_8(bRm))
10929 {
10930 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
10931 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
10932 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
10933 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
10934 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
10935 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
10936 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
10937 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
10938 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10939 }
10940 }
10941}
10942
10943
/** Opcode 0xdd !11/0.
 * FLD m64real - push a 64-bit real memory operand (converted to 80-bit) onto
 * the FPU stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) is the slot that becomes ST0 after the push; it must be empty
       or we have a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10974
10975
/** Opcode 0xdd !11/1.  (Comment previously said !11/0; the dispatcher routes
 * ModRM.reg=1 here - see iemOp_EscF5.)
 * FISTTP m64int - store ST0 as a truncated 64-bit integer and pop.  On a
 * masked invalid-operation (FCW.IM set) with empty ST0, the integer
 * indefinite value (INT64_MIN) is stored instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11009
11010
/** Opcode 0xdd !11/2.  (Comment previously said !11/0; the dispatcher routes
 * ModRM.reg=2 here - see iemOp_EscF5.)
 * FST m64real - store ST0 to a 64-bit real memory operand; no pop.  On a
 * masked invalid-operation with empty ST0, negative QNaN is stored. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11044
11045
11046
11047
/** Opcode 0xdd !11/3.  (Comment previously said !11/0; the dispatcher routes
 * ModRM.reg=3 here - see iemOp_EscF5.)
 * FSTP m64real - like FST m64real but pops ST0 afterwards. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11081
11082
/** Opcode 0xdd !11/4.  (Comment previously said !11/0; the dispatcher routes
 * ModRM.reg=4 here - see iemOp_EscF5.)
 * FRSTOR m94/108byte - restore the full FPU state from memory; deferred to a
 * CIMPL worker since it rewrites the whole x87 state. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
11100
11101
/** Opcode 0xdd !11/6.  (Comment previously said !11/0; the dispatcher routes
 * ModRM.reg=6 here - see iemOp_EscF5.)
 * FNSAVE m94/108byte - save the full FPU state to memory and reinitialize. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
11119
/** Opcode 0xdd !11/7.  (Comment previously said !11/0; the dispatcher routes
 * ModRM.reg=7 here - see iemOp_EscF5.)
 * FNSTSW m16 - store the FPU status word to memory without checking for
 * pending FPU exceptions (the "no-wait" form). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2, 0, 0);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
11143
11144
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - mark ST(i)'s tag as empty; FOP/FIP are updated but FTOP is
 * not changed. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
             unmodified. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11164
11165
/** Opcode 0xdd 11/2.  (Comment previously said 11/1; the dispatcher routes
 * ModRM.reg=2 here - see iemOp_EscF5.)
 * FST ST(i) - copy ST0 into ST(i) without popping. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Wrap the ST0 value in a result with a zero FSW delta so the normal
           store path can be reused. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11188
11189
/** Opcode 0xdd 11/4.  (Comment previously said 11/3; the dispatcher routes
 * ModRM.reg=4 here - see iemOp_EscF5.)
 * FUCOM ST0,ST(i) - unordered compare, FSW only, no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
11196
11197
/** Opcode 0xdd 11/5.  (Comment previously said 11/4; the dispatcher routes
 * ModRM.reg=5 here - see iemOp_EscF5.)
 * FUCOMP ST0,ST(i) - unordered compare, FSW only, then pop ST0. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
11204
11205
11206/**
11207 * @opcode 0xdd
11208 */
11209FNIEMOP_DEF(iemOp_EscF5)
11210{
11211 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11212 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
11213 if (IEM_IS_MODRM_REG_MODE(bRm))
11214 {
11215 switch (IEM_GET_MODRM_REG_8(bRm))
11216 {
11217 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
11218 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
11219 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
11220 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
11221 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
11222 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
11223 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11224 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11225 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11226 }
11227 }
11228 else
11229 {
11230 switch (IEM_GET_MODRM_REG_8(bRm))
11231 {
11232 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
11233 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
11234 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
11235 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
11236 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
11237 case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
11238 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
11239 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
11240 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11241 }
11242 }
11243}
11244
11245
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST0 - add ST0 to ST(i), store in ST(i), pop ST0. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
11252
11253
/** Opcode 0xde 11/1.  (Comment previously said 11/0; the dispatcher routes
 * ModRM.reg=1 here - see iemOp_EscF6.)
 * FMULP ST(i),ST0 - multiply ST(i) by ST0, store in ST(i), pop ST0. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
11260
11261
/** Opcode 0xde 0xd9.
 * FCOMPP - compare ST0 with ST1, FSW only, then pop both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
11268
11269
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST0 - reversed subtract into ST(i), then pop ST0. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
11276
11277
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST0 - subtract ST0 from ST(i), store in ST(i), pop ST0. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
11284
11285
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST0 - reversed divide into ST(i), then pop ST0. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
11292
11293
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST0 - divide ST(i) by ST0, store in ST(i), pop ST0. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
11300
11301
11302/**
11303 * Common worker for FPU instructions working on ST0 and an m16i, and storing
11304 * the result in ST0.
11305 *
11306 * @param bRm Mod R/M byte.
11307 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11308 */
11309FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
11310{
11311 IEM_MC_BEGIN(3, 3, 0, 0);
11312 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11313 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11314 IEM_MC_LOCAL(int16_t, i16Val2);
11315 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11316 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11317 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
11318
11319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11321
11322 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11323 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11324 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11325
11326 IEM_MC_PREPARE_FPU_USAGE();
11327 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11328 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
11329 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
11330 } IEM_MC_ELSE() {
11331 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11332 } IEM_MC_ENDIF();
11333 IEM_MC_ADVANCE_RIP_AND_FINISH();
11334
11335 IEM_MC_END();
11336}
11337
11338
/** Opcode 0xde !11/0.
 * FIADD m16int - add 16-bit integer memory operand to ST0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
11345
11346
/** Opcode 0xde !11/1.
 * FIMUL m16int - multiply ST0 by 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
11353
11354
/** Opcode 0xde !11/2.
 * FICOM m16int - compare ST0 with 16-bit integer memory operand; FSW only,
 * no pop. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11386
11387
/** Opcode 0xde !11/3.
 * FICOMP m16int - like FICOM m16int but pops ST0 afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11419
11420
/** Opcode 0xde !11/4.
 * FISUB m16int - subtract 16-bit integer memory operand from ST0. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
11427
11428
/** Opcode 0xde !11/5.
 * FISUBR m16int - reversed subtract: memory operand minus ST0, into ST0. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
11435
11436
/** Opcode 0xde !11/6.
 * FIDIV m16int - divide ST0 by 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
11443
11444
/** Opcode 0xde !11/7.
 * FIDIVR m16int - reversed divide: memory operand divided by ST0, into ST0. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
11451
11452
11453/**
11454 * @opcode 0xde
11455 */
11456FNIEMOP_DEF(iemOp_EscF6)
11457{
11458 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11459 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
11460 if (IEM_IS_MODRM_REG_MODE(bRm))
11461 {
11462 switch (IEM_GET_MODRM_REG_8(bRm))
11463 {
11464 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
11465 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
11466 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
11467 case 3: if (bRm == 0xd9)
11468 return FNIEMOP_CALL(iemOp_fcompp);
11469 IEMOP_RAISE_INVALID_OPCODE_RET();
11470 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
11471 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
11472 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
11473 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
11474 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11475 }
11476 }
11477 else
11478 {
11479 switch (IEM_GET_MODRM_REG_8(bRm))
11480 {
11481 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
11482 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
11483 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
11484 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
11485 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
11486 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
11487 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
11488 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
11489 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11490 }
11491 }
11492}
11493
11494
/** Opcode 0xdf 11/0.
 * Undocument instruction, assumed to work like ffree + fincstp: tag ST(i)
 * as empty, then increment FTOP (popping without storing). */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11514
11515
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - store the FPU status word in AX without checking for pending
 * FPU exceptions (the "no-wait" form). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11530
11531
11532/** Opcode 0xdf 11/5. */
11533FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
11534{
11535 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
11536 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
11537 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
11538 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
11539}
11540
11541
/** Opcode 0xdf 11/6.
 * FCOMIP: ordered compare of ST(0) with ST(i) setting EFLAGS, then pop
 * (fUCmp=false selects the ordered compare in the shared CImpl worker;
 * RT_BIT_32(31) encodes the pop flag alongside the FPU opcode word). */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11550
11551
/** Opcode 0xdf !11/0.
 * FILD m16i: load a signed 16-bit integer from memory, convert it to
 * 80-bit real and push it onto the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) is the register the push will land in; only push if it's free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11582
11583
/** Opcode 0xdf !11/1.
 * FISTTP m16i: store ST(0) to memory as a signed 16-bit integer using
 * truncation (chop) rounding, then pop the register stack. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: if the invalid-operation exception is masked (FCW.IM),
           store the integer-indefinite value, then record stack underflow. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11617
11618
/** Opcode 0xdf !11/2.
 * FIST m16i: store ST(0) to memory as a signed 16-bit integer (rounded
 * per FCW.RC); the register stack is not popped. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: if the invalid-operation exception is masked (FCW.IM),
           store the integer-indefinite value, then record stack underflow. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11652
11653
/** Opcode 0xdf !11/3.
 * FISTP m16i: store ST(0) to memory as a signed 16-bit integer (rounded
 * per FCW.RC), then pop the register stack. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: if the invalid-operation exception is masked (FCW.IM),
           store the integer-indefinite value, then record stack underflow. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11687
11688
/** Opcode 0xdf !11/4.
 * FBLD m80bcd: load an 80-bit packed BCD value from memory, convert it
 * to 80-bit real and push it onto the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) is the register the push will land in; only push if it's free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11719
11720
/** Opcode 0xdf !11/5.
 * FILD m64i: load a signed 64-bit integer from memory, convert it to
 * 80-bit real and push it onto the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) is the register the push will land in; only push if it's free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11751
11752
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd: store ST(0) to memory as an 80-bit packed BCD value,
 * then pop the register stack. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Explicit mapping variant since the 10-byte BCD store needs its own
       size and alignment specification. */
    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: if the invalid-operation exception is masked (FCW.IM),
           store the BCD-indefinite value, then record stack underflow. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11786
11787
/** Opcode 0xdf !11/7.
 * FISTP m64i: store ST(0) to memory as a signed 64-bit integer (rounded
 * per FCW.RC), then pop the register stack. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: if the invalid-operation exception is masked (FCW.IM),
           store the integer-indefinite value, then record stack underflow. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11821
11822
11823/**
11824 * @opcode 0xdf
11825 */
11826FNIEMOP_DEF(iemOp_EscF7)
11827{
11828 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11829 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
11830 if (IEM_IS_MODRM_REG_MODE(bRm))
11831 {
11832 switch (IEM_GET_MODRM_REG_8(bRm))
11833 {
11834 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
11835 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
11836 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
11837 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
11838 case 4: if (bRm == 0xe0)
11839 return FNIEMOP_CALL(iemOp_fnstsw_ax);
11840 IEMOP_RAISE_INVALID_OPCODE_RET();
11841 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
11842 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
11843 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11844 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11845 }
11846 }
11847 else
11848 {
11849 switch (IEM_GET_MODRM_REG_8(bRm))
11850 {
11851 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
11852 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
11853 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
11854 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
11855 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
11856 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
11857 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
11858 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
11859 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11860 }
11861 }
11862}
11863
11864
11865/**
11866 * @opcode 0xe0
11867 */
11868FNIEMOP_DEF(iemOp_loopne_Jb)
11869{
11870 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
11871 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
11872 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11873
11874 switch (pVCpu->iem.s.enmEffAddrMode)
11875 {
11876 case IEMMODE_16BIT:
11877 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
11878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11879 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
11880 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11881 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11882 } IEM_MC_ELSE() {
11883 IEM_MC_ADVANCE_RIP_AND_FINISH();
11884 } IEM_MC_ENDIF();
11885 IEM_MC_END();
11886 break;
11887
11888 case IEMMODE_32BIT:
11889 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
11890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11891 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
11892 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11893 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11894 } IEM_MC_ELSE() {
11895 IEM_MC_ADVANCE_RIP_AND_FINISH();
11896 } IEM_MC_ENDIF();
11897 IEM_MC_END();
11898 break;
11899
11900 case IEMMODE_64BIT:
11901 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
11902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11903 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
11904 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11905 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11906 } IEM_MC_ELSE() {
11907 IEM_MC_ADVANCE_RIP_AND_FINISH();
11908 } IEM_MC_ENDIF();
11909 IEM_MC_END();
11910 break;
11911
11912 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11913 }
11914}
11915
11916
11917/**
11918 * @opcode 0xe1
11919 */
11920FNIEMOP_DEF(iemOp_loope_Jb)
11921{
11922 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
11923 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
11924 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11925
11926 switch (pVCpu->iem.s.enmEffAddrMode)
11927 {
11928 case IEMMODE_16BIT:
11929 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
11930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11931 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
11932 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
11933 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11934 } IEM_MC_ELSE() {
11935 IEM_MC_ADVANCE_RIP_AND_FINISH();
11936 } IEM_MC_ENDIF();
11937 IEM_MC_END();
11938 break;
11939
11940 case IEMMODE_32BIT:
11941 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
11942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11943 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
11944 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
11945 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11946 } IEM_MC_ELSE() {
11947 IEM_MC_ADVANCE_RIP_AND_FINISH();
11948 } IEM_MC_ENDIF();
11949 IEM_MC_END();
11950 break;
11951
11952 case IEMMODE_64BIT:
11953 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
11954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11955 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
11956 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
11957 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11958 } IEM_MC_ELSE() {
11959 IEM_MC_ADVANCE_RIP_AND_FINISH();
11960 } IEM_MC_ENDIF();
11961 IEM_MC_END();
11962 break;
11963
11964 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11965 }
11966}
11967
11968
11969/**
11970 * @opcode 0xe2
11971 */
11972FNIEMOP_DEF(iemOp_loop_Jb)
11973{
11974 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
11975 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
11976 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11977
11978 /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
11979 * using the 32-bit operand size override. How can that be restarted? See
11980 * weird pseudo code in intel manual. */
11981
11982 /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
11983 * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
11984 * the loop causes guest crashes, but when logging it's nice to skip a few million
11985 * lines of useless output. */
11986#if defined(LOG_ENABLED)
11987 if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
11988 switch (pVCpu->iem.s.enmEffAddrMode)
11989 {
11990 case IEMMODE_16BIT:
11991 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
11992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11993 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
11994 IEM_MC_ADVANCE_RIP_AND_FINISH();
11995 IEM_MC_END();
11996 break;
11997
11998 case IEMMODE_32BIT:
11999 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12001 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
12002 IEM_MC_ADVANCE_RIP_AND_FINISH();
12003 IEM_MC_END();
12004 break;
12005
12006 case IEMMODE_64BIT:
12007 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12009 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
12010 IEM_MC_ADVANCE_RIP_AND_FINISH();
12011 IEM_MC_END();
12012 break;
12013
12014 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12015 }
12016#endif
12017
12018 switch (pVCpu->iem.s.enmEffAddrMode)
12019 {
12020 case IEMMODE_16BIT:
12021 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12023 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
12024 IEM_MC_IF_CX_IS_NZ() {
12025 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12026 } IEM_MC_ELSE() {
12027 IEM_MC_ADVANCE_RIP_AND_FINISH();
12028 } IEM_MC_ENDIF();
12029 IEM_MC_END();
12030 break;
12031
12032 case IEMMODE_32BIT:
12033 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12035 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
12036 IEM_MC_IF_ECX_IS_NZ() {
12037 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12038 } IEM_MC_ELSE() {
12039 IEM_MC_ADVANCE_RIP_AND_FINISH();
12040 } IEM_MC_ENDIF();
12041 IEM_MC_END();
12042 break;
12043
12044 case IEMMODE_64BIT:
12045 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12047 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
12048 IEM_MC_IF_RCX_IS_NZ() {
12049 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12050 } IEM_MC_ELSE() {
12051 IEM_MC_ADVANCE_RIP_AND_FINISH();
12052 } IEM_MC_ENDIF();
12053 IEM_MC_END();
12054 break;
12055
12056 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12057 }
12058}
12059
12060
12061/**
12062 * @opcode 0xe3
12063 */
12064FNIEMOP_DEF(iemOp_jecxz_Jb)
12065{
12066 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
12067 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12068 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12069
12070 switch (pVCpu->iem.s.enmEffAddrMode)
12071 {
12072 case IEMMODE_16BIT:
12073 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12075 IEM_MC_IF_CX_IS_NZ() {
12076 IEM_MC_ADVANCE_RIP_AND_FINISH();
12077 } IEM_MC_ELSE() {
12078 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12079 } IEM_MC_ENDIF();
12080 IEM_MC_END();
12081 break;
12082
12083 case IEMMODE_32BIT:
12084 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12086 IEM_MC_IF_ECX_IS_NZ() {
12087 IEM_MC_ADVANCE_RIP_AND_FINISH();
12088 } IEM_MC_ELSE() {
12089 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12090 } IEM_MC_ENDIF();
12091 IEM_MC_END();
12092 break;
12093
12094 case IEMMODE_64BIT:
12095 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12097 IEM_MC_IF_RCX_IS_NZ() {
12098 IEM_MC_ADVANCE_RIP_AND_FINISH();
12099 } IEM_MC_ELSE() {
12100 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12101 } IEM_MC_ENDIF();
12102 IEM_MC_END();
12103 break;
12104
12105 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12106 }
12107}
12108
12109
/** Opcode 0xe4
 * IN AL,Ib: read one byte from the immediate I/O port into AL.  The 0x80
 * flag OR'ed into the last argument tells the CImpl worker the port came
 * from an immediate. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12119
12120
/** Opcode 0xe5
 * IN eAX,Ib: read a word or dword (per effective operand size) from the
 * immediate I/O port into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12131
12132
/** Opcode 0xe6
 * OUT Ib,AL: write the AL byte to the immediate I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12142
12143
/** Opcode 0xe7
 * OUT Ib,eAX: write AX or EAX (per effective operand size) to the
 * immediate I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12154
12155
12156/**
12157 * @opcode 0xe8
12158 */
12159FNIEMOP_DEF(iemOp_call_Jv)
12160{
12161 IEMOP_MNEMONIC(call_Jv, "call Jv");
12162 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12163 switch (pVCpu->iem.s.enmEffOpSize)
12164 {
12165 case IEMMODE_16BIT:
12166 {
12167 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12168 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_16, (int16_t)u16Imm);
12169 }
12170
12171 case IEMMODE_32BIT:
12172 {
12173 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12174 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_32, (int32_t)u32Imm);
12175 }
12176
12177 case IEMMODE_64BIT:
12178 {
12179 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
12180 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_64, u64Imm);
12181 }
12182
12183 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12184 }
12185}
12186
12187
12188/**
12189 * @opcode 0xe9
12190 */
12191FNIEMOP_DEF(iemOp_jmp_Jv)
12192{
12193 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
12194 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12195 switch (pVCpu->iem.s.enmEffOpSize)
12196 {
12197 case IEMMODE_16BIT:
12198 IEM_MC_BEGIN(0, 0, 0, 0);
12199 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
12200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12201 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
12202 IEM_MC_END();
12203 break;
12204
12205 case IEMMODE_64BIT:
12206 case IEMMODE_32BIT:
12207 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12208 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
12209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12210 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
12211 IEM_MC_END();
12212 break;
12213
12214 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12215 }
12216}
12217
12218
12219/**
12220 * @opcode 0xea
12221 */
12222FNIEMOP_DEF(iemOp_jmp_Ap)
12223{
12224 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
12225 IEMOP_HLP_NO_64BIT();
12226
12227 /* Decode the far pointer address and pass it on to the far call C implementation. */
12228 uint32_t off32Seg;
12229 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
12230 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
12231 else
12232 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
12233 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
12234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12235 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
12236 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
12237 iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
12238}
12239
12240
12241/**
12242 * @opcode 0xeb
12243 */
12244FNIEMOP_DEF(iemOp_jmp_Jb)
12245{
12246 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
12247 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12248 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12249
12250 IEM_MC_BEGIN(0, 0, 0, 0);
12251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12252 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12253 IEM_MC_END();
12254}
12255
12256
/** Opcode 0xec
 * IN AL,DX: read one byte from the I/O port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12265
12266
/** Opcode 0xed
 * IN eAX,DX: read a word or dword (per effective operand size) from the
 * I/O port in DX into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12276
12277
/** Opcode 0xee
 * OUT DX,AL: write the AL byte to the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12286
12287
/** Opcode 0xef
 * OUT DX,eAX: write AX or EAX (per effective operand size) to the I/O
 * port in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12297
12298
12299/**
12300 * @opcode 0xf0
12301 */
12302FNIEMOP_DEF(iemOp_lock)
12303{
12304 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
12305 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12306 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
12307
12308 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12309 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12310}
12311
12312
12313/**
12314 * @opcode 0xf1
12315 */
12316FNIEMOP_DEF(iemOp_int1)
12317{
12318 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
12319 /** @todo Does not generate \#UD on 286, or so they say... Was allegedly a
12320 * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
12321 * LOADALL memo. Needs some testing. */
12322 IEMOP_HLP_MIN_386();
12323 /** @todo testcase! */
12324 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
12325 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
12326 iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
12327}
12328
12329
12330/**
12331 * @opcode 0xf2
12332 */
12333FNIEMOP_DEF(iemOp_repne)
12334{
12335 /* This overrides any previous REPE prefix. */
12336 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
12337 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
12338 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
12339
12340 /* For the 4 entry opcode tables, REPNZ overrides any previous
12341 REPZ and operand size prefixes. */
12342 pVCpu->iem.s.idxPrefix = 3;
12343
12344 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12345 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12346}
12347
12348
12349/**
12350 * @opcode 0xf3
12351 */
12352FNIEMOP_DEF(iemOp_repe)
12353{
12354 /* This overrides any previous REPNE prefix. */
12355 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
12356 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
12357 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
12358
12359 /* For the 4 entry opcode tables, REPNZ overrides any previous
12360 REPNZ and operand size prefixes. */
12361 pVCpu->iem.s.idxPrefix = 2;
12362
12363 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12364 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12365}
12366
12367
12368/**
12369 * @opcode 0xf4
12370 */
12371FNIEMOP_DEF(iemOp_hlt)
12372{
12373 IEMOP_MNEMONIC(hlt, "hlt");
12374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12375 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, iemCImpl_hlt);
12376}
12377
12378
12379/**
12380 * @opcode 0xf5
12381 */
12382FNIEMOP_DEF(iemOp_cmc)
12383{
12384 IEMOP_MNEMONIC(cmc, "cmc");
12385 IEM_MC_BEGIN(0, 0, 0, 0);
12386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12387 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
12388 IEM_MC_ADVANCE_RIP_AND_FINISH();
12389 IEM_MC_END();
12390}
12391
12392
12393/**
12394 * Body for of 'inc/dec/not/neg Eb'.
12395 */
12396#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
12397 if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
12398 { \
12399 /* register access */ \
12400 IEM_MC_BEGIN(2, 0, 0, 0); \
12401 IEMOP_HLP_DONE_DECODING(); \
12402 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12403 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
12404 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
12405 IEM_MC_REF_EFLAGS(pEFlags); \
12406 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
12407 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12408 IEM_MC_END(); \
12409 } \
12410 else \
12411 { \
12412 /* memory access. */ \
12413 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
12414 { \
12415 IEM_MC_BEGIN(2, 2, 0, 0); \
12416 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12417 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
12418 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12419 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12420 \
12421 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
12422 IEMOP_HLP_DONE_DECODING(); \
12423 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12424 IEM_MC_FETCH_EFLAGS(EFlags); \
12425 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
12426 \
12427 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
12428 IEM_MC_COMMIT_EFLAGS(EFlags); \
12429 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12430 IEM_MC_END(); \
12431 } \
12432 else \
12433 { \
12434 IEM_MC_BEGIN(2, 2, 0, 0); \
12435 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12436 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
12437 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12438 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12439 \
12440 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
12441 IEMOP_HLP_DONE_DECODING(); \
12442 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12443 IEM_MC_FETCH_EFLAGS(EFlags); \
12444 IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
12445 \
12446 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
12447 IEM_MC_COMMIT_EFLAGS(EFlags); \
12448 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12449 IEM_MC_END(); \
12450 } \
12451 } \
12452 (void)0
12453
12454
/**
 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
 *
 * Emits the register-target path (16/32/64-bit) and the non-LOCK
 * memory-target path.  NOTE: this macro deliberately ends inside an open
 * 'else { ' block and must always be followed by
 * IEMOP_BODY_UNARY_Ev_LOCKED, which emits the LOCK-prefixed memory path
 * and closes the braces.
 *
 * @param   a_fnNormalU16   Worker for the 16-bit operation (non-locked).
 * @param   a_fnNormalU32   Worker for the 32-bit operation (non-locked).
 * @param   a_fnNormalU64   Worker for the 64-bit operation (non-locked).
 */
#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(2, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,  pu16Dst, 0); \
                IEM_MC_ARG(uint32_t *,  pEFlags, 1); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,  pu32Dst, 0); \
                IEM_MC_ARG(uint32_t *,  pEFlags, 1); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                /* 32-bit GPR writes zero the high dword of the 64-bit register. */ \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,  pu64Dst, 0); \
                IEM_MC_ARG(uint32_t *,  pEFlags, 1); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *,  pu16Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *,  pu32Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *,  pu64Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
12578
/**
 * Body for the LOCK-prefixed memory variant of 'inc/dec/not/neg Ev'.
 *
 * Must immediately follow IEMOP_BODY_UNARY_Ev: that macro leaves an open
 * 'else {' for the locked memory path, which this macro fills in and
 * closes.  Uses IEMOP_HLP_DONE_DECODING (LOCK allowed here) and the
 * locked worker functions.
 *
 * @param   a_fnLockedU16   Locked worker for the 16-bit operation.
 * @param   a_fnLockedU32   Locked worker for the 32-bit operation.
 * @param   a_fnLockedU64   Locked worker for the 64-bit operation.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *,  pu16Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *,  pu32Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *,  pu64Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
12644
12645
/**
 * @opmaps grp3_f6
 * @opcode /0
 * @todo also /1
 *
 * 'test Eb,Ib' - ANDs the r/m8 operand with an immediate byte, updating
 * EFLAGS only; the destination is never written back (memory operand is
 * mapped read-only).
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by TEST. */

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(3, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 3, 0, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = size of the imm8 that follows ModR/M. */

        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_LOCAL(uint8_t, bUnmapInfo);
        IEM_MC_ARG(uint8_t const *, pu8Dst, 0); /* read-only mapping - TEST does not write the operand. */
        IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

        IEM_MC_ARG_CONST(uint8_t, u8Src, u8Imm, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12696
12697
/**
 * Opcode 0xf6 /4, /5, /6 and /7 - common body for mul/imul/div/idiv Eb.
 *
 * The 8-bit multiply/divide workers operate on AX (product / dividend)
 * and the r/m8 value.  A non-zero return code from the worker raises
 * \#DE (divide error).
 *
 * @param   bRm     The ModR/M byte.
 * @param   pfnU8   The 8-bit worker (mul/imul/div/idiv variant).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEM_MC_BEGIN(3, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR(); /* worker signalled overflow / divide-by-zero */
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2, 0, 0);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR(); /* worker signalled overflow / divide-by-zero */
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
12748
12749
/**
 * Opcode 0xf7 /4, /5, /6 and /7 - common body for mul/imul/div/idiv Ev.
 *
 * The workers operate on the xAX/xDX register pair and the r/m operand,
 * for 16/32/64-bit operand sizes.  A non-zero return code from the
 * worker raises \#DE (divide error).  In the 32-bit case the high dwords
 * of RAX/RDX are explicitly cleared on success, per 64-bit GPR rules.
 *
 * @param   bRm     The ModR/M byte.
 * @param   pImpl   Table with the 16/32/64-bit workers.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit writes zero the high halves of RAX and RDX. */
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2, 0, 0);
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit writes zero the high halves of RAX and RDX. */
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12920
12921
/**
 * @opmaps grp3_f6
 * @opcode /2
 *
 * 'not Eb' - one's complement of the r/m8 operand; body (incl. the
 * LOCK-prefixed memory variant) supplied by IEMOP_BODY_UNARY_Eb.
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Eb, "not Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
}
12931
12932
12933/**
12934 * @opmaps grp3_f6
12935 * @opcode /3
12936 */
12937FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
12938{
12939 IEMOP_MNEMONIC(net_Eb, "neg Eb");
12940 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
12941}
12942
12943
/**
 * @opcode 0xf6
 *
 * Group 3 dispatcher for byte operands.  The ModR/M reg field selects
 * the actual instruction; /0 and /1 both decode as TEST (see the @todo
 * on iemOp_grp3_test_Eb).
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm); /* /1 aliases TEST on real hardware. */
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12975
12976
/**
 * Opcode 0xf7 /0 - 'test Ev,Iv'.
 *
 * ANDs the r/m operand with an immediate (imm16/imm32; imm32
 * sign-extended to 64 bits in 64-bit operand size) and updates EFLAGS
 * only; the destination is never written back (memory operand mapped
 * read-only).
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by TEST. */

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended to 64 bits */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = size of the imm16 that follows ModR/M. */

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint16_t const *, pu16Dst, 0); /* read-only - TEST does not write the operand. */
                IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = size of the imm32 that follows ModR/M. */

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint32_t const *, pu32Dst, 0);
                IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* still a 4-byte imm32 (sign-extended) in 64-bit mode. */

                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_ARG(uint64_t const *, pu64Dst, 0);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13112
13113
/**
 * Opcode 0xf7 /2 - 'not Ev'.
 *
 * One's complement of the r/m operand.  The two body macros are a
 * matched pair: the first ends in an open 'else' that the _LOCKED one
 * completes with the LOCK-prefixed memory path.
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_not_u16,        iemAImpl_not_u32,        iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
13121
13122
/**
 * Opcode 0xf7 /3 - 'neg Ev'.
 *
 * Two's complement negation of the r/m operand.  The two body macros
 * are a matched pair: the first ends in an open 'else' that the _LOCKED
 * one completes with the LOCK-prefixed memory path.
 */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_neg_u16,        iemAImpl_neg_u32,        iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
13130
13131
/**
 * @opcode 0xf7
 *
 * Group 3 dispatcher for word/dword/qword operands.  The ModR/M reg
 * field selects the instruction; /0 and /1 both decode as TEST.
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm); /* /1 aliases TEST on real hardware. */
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13163
13164
/**
 * @opcode 0xf8
 *
 * 'clc' - clears X86_EFL_CF in EFLAGS.
 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13177
13178
/**
 * @opcode 0xf9
 *
 * 'stc' - sets X86_EFL_CF in EFLAGS.
 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13191
13192
/**
 * @opcode 0xfa
 *
 * 'cli' - deferred to the C implementation (iemCImpl_cli) since it
 * touches RFLAGS, may cause a VM-exit, and requires an IRQ check before
 * execution.
 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, iemCImpl_cli);
}
13202
13203
/**
 * @opcode 0xfb
 *
 * 'sti' - deferred to the C implementation (iemCImpl_sti); requires an
 * IRQ check after execution and sets up the interrupt inhibit shadow.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(  IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
                                | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, iemCImpl_sti);
}
13211
13212
/**
 * @opcode 0xfc
 *
 * 'cld' - clears X86_EFL_DF in EFLAGS.
 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13225
13226
/**
 * @opcode 0xfd
 *
 * 'std' - sets X86_EFL_DF in EFLAGS.
 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13239
13240
/**
 * @opmaps grp4
 * @opcode /0
 *
 * 'inc Eb' - body (incl. the LOCK-prefixed memory variant) supplied by
 * IEMOP_BODY_UNARY_Eb.
 */
FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Eb, "inc Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
}
13250
13251
/**
 * @opmaps grp4
 * @opcode /1
 *
 * 'dec Eb' - body (incl. the LOCK-prefixed memory variant) supplied by
 * IEMOP_BODY_UNARY_Eb.
 */
FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Eb, "dec Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
}
13261
13262
/**
 * @opcode 0xfe
 *
 * Group 4 dispatcher: only /0 (inc Eb) and /1 (dec Eb) are defined;
 * /2 through /7 raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
        default:
            /** @todo is the eff-addr decoded? */
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}
13279
/**
 * Opcode 0xff /0 - 'inc Ev'.
 *
 * Paired body macros: IEMOP_BODY_UNARY_Ev ends in an open 'else' that
 * IEMOP_BODY_UNARY_Ev_LOCKED completes with the LOCK-prefixed memory
 * path.
 */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_inc_u16,        iemAImpl_inc_u32,        iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
13287
13288
/**
 * Opcode 0xff /1 - 'dec Ev'.
 *
 * Paired body macros: IEMOP_BODY_UNARY_Ev ends in an open 'else' that
 * IEMOP_BODY_UNARY_Ev_LOCKED completes with the LOCK-prefixed memory
 * path.
 */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_dec_u16,        iemAImpl_dec_u32,        iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
13296
13297
/**
 * Opcode 0xff /2 - 'calln Ev' (near indirect call).
 *
 * The target RIP comes either from a register or from memory; the
 * actual call (stack push + branch) is done by the iemCImpl_call_NN
 * C implementations.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from memory. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1, 0, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13384
/**
 * Common body of the group 5 far call/jmp instructions (0xff /3 and /5),
 * which load a far pointer (16-bit selector + 16/32/64-bit offset) from the
 * memory operand and hand it to the given C implementation.
 *
 * Register operand forms raise \#UD (there is no register encoding of a far
 * pointer).  In 64-bit mode the default operand size is 32-bit; only Intel
 * honours a REX.W prefix here (AMD forces 32-bit, see the adjustment below).
 *
 * @param   a_bRm       The ModR/M byte (must be a memory mode).
 * @param   a_fnCImpl   The C implementation taking (u16Sel, offSeg,
 *                      enmEffOpSize), i.e. iemCImpl_callf or iemCImpl_FarJmp.
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory.  The offset is fetched first, then the \
       selector word that follows it at +2/+4/+8. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1, 0, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint16_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
    \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint32_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
    \
        case IEMMODE_64BIT: \
            /* Only Intel reaches this (see the REX.W adjustment above). */ \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint64_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE /* no gates */, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
    \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
13452
13453
13454/**
13455 * Opcode 0xff /3.
13456 * @param bRm The RM byte.
13457 */
13458FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
13459{
13460 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
13461 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf);
13462}
13463
13464
13465/**
13466 * Opcode 0xff /4.
13467 * @param bRm The RM byte.
13468 */
13469FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
13470{
13471 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
13472 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13473
13474 if (IEM_IS_MODRM_REG_MODE(bRm))
13475 {
13476 /* The new RIP is taken from a register. */
13477 switch (pVCpu->iem.s.enmEffOpSize)
13478 {
13479 case IEMMODE_16BIT:
13480 IEM_MC_BEGIN(0, 1, 0, 0);
13481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13482 IEM_MC_LOCAL(uint16_t, u16Target);
13483 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13484 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
13485 IEM_MC_END();
13486 break;
13487
13488 case IEMMODE_32BIT:
13489 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
13490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13491 IEM_MC_LOCAL(uint32_t, u32Target);
13492 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13493 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
13494 IEM_MC_END();
13495 break;
13496
13497 case IEMMODE_64BIT:
13498 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
13499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13500 IEM_MC_LOCAL(uint64_t, u64Target);
13501 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13502 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
13503 IEM_MC_END();
13504 break;
13505
13506 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13507 }
13508 }
13509 else
13510 {
13511 /* The new RIP is taken from a memory location. */
13512 switch (pVCpu->iem.s.enmEffOpSize)
13513 {
13514 case IEMMODE_16BIT:
13515 IEM_MC_BEGIN(0, 2, 0, 0);
13516 IEM_MC_LOCAL(uint16_t, u16Target);
13517 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13518 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13520 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13521 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
13522 IEM_MC_END();
13523 break;
13524
13525 case IEMMODE_32BIT:
13526 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
13527 IEM_MC_LOCAL(uint32_t, u32Target);
13528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13529 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13531 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13532 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
13533 IEM_MC_END();
13534 break;
13535
13536 case IEMMODE_64BIT:
13537 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
13538 IEM_MC_LOCAL(uint64_t, u64Target);
13539 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13540 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13542 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13543 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
13544 IEM_MC_END();
13545 break;
13546
13547 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13548 }
13549 }
13550}
13551
13552
13553/**
13554 * Opcode 0xff /5.
13555 * @param bRm The RM byte.
13556 */
13557FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
13558{
13559 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
13560 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp);
13561}
13562
13563
13564/**
13565 * Opcode 0xff /6.
13566 * @param bRm The RM byte.
13567 */
13568FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
13569{
13570 IEMOP_MNEMONIC(push_Ev, "push Ev");
13571
13572 /* Registers are handled by a common worker. */
13573 if (IEM_IS_MODRM_REG_MODE(bRm))
13574 return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
13575
13576 /* Memory we do here. */
13577 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13578 switch (pVCpu->iem.s.enmEffOpSize)
13579 {
13580 case IEMMODE_16BIT:
13581 IEM_MC_BEGIN(0, 2, 0, 0);
13582 IEM_MC_LOCAL(uint16_t, u16Src);
13583 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13584 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13586 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13587 IEM_MC_PUSH_U16(u16Src);
13588 IEM_MC_ADVANCE_RIP_AND_FINISH();
13589 IEM_MC_END();
13590 break;
13591
13592 case IEMMODE_32BIT:
13593 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
13594 IEM_MC_LOCAL(uint32_t, u32Src);
13595 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13596 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13598 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13599 IEM_MC_PUSH_U32(u32Src);
13600 IEM_MC_ADVANCE_RIP_AND_FINISH();
13601 IEM_MC_END();
13602 break;
13603
13604 case IEMMODE_64BIT:
13605 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
13606 IEM_MC_LOCAL(uint64_t, u64Src);
13607 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13608 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13610 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13611 IEM_MC_PUSH_U64(u64Src);
13612 IEM_MC_ADVANCE_RIP_AND_FINISH();
13613 IEM_MC_END();
13614 break;
13615
13616 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13617 }
13618}
13619
13620
13621/**
13622 * @opcode 0xff
13623 */
13624FNIEMOP_DEF(iemOp_Grp5)
13625{
13626 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13627 switch (IEM_GET_MODRM_REG_8(bRm))
13628 {
13629 case 0:
13630 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
13631 case 1:
13632 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
13633 case 2:
13634 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
13635 case 3:
13636 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
13637 case 4:
13638 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
13639 case 5:
13640 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
13641 case 6:
13642 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
13643 case 7:
13644 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
13645 IEMOP_RAISE_INVALID_OPCODE_RET();
13646 }
13647 AssertFailedReturn(VERR_IEM_IPE_3);
13648}
13649
13650
13651
/**
 * The one byte opcode decoder function table, indexed by the opcode byte.
 *
 * Not static because it is forward declared via extern at the top of this
 * file.  Each entry decodes one primary opcode; prefixes (lock, rep, segment,
 * operand/address size) and escape bytes are themselves entries here.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex3, iemOp_lds_Gv_Mp__vex2, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_in_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
13719
13720
13721/** @} */
13722
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette