VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 102397

Last change on this file since 102397 was 102397, checked in by vboxsync, 17 months ago

VMM/IEM: Enabled native IEM_MC_STORE_MEM[_FLAT]_U8/16/32/64. bugref:10371

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 532.0 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 102397 2023-11-30 13:53:54Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
/**
 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
 * memory/register as the destination.
 *
 * @param   a_fnNormalU8    The non-locked assembly worker, called as
 *                          (pu8Dst, u8Src, pEFlags).
 *
 * Used with IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED.
 *
 * Note!   This macro intentionally leaves two scopes open (the memory 'else'
 *         branch and the lock-prefix 'else' branch); the companion macro named
 *         above supplies the lock-prefix body and closes them.  Do not add a
 *         semicolon between the two macro invocations' bodies.
 */
#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
118
/**
 * Body for instructions like TEST & CMP, ++ with a byte memory/registers as
 * operands.
 *
 * @param   a_fnNormalU8    The assembly worker, called as
 *                          (pu8Dst, u8Src, pEFlags).  The destination is only
 *                          read, so the memory operand is mapped read-only.
 *
 * Used with IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED.
 *
 * Note!   Like IEMOP_BODY_BINARY_rm_r8_RW, this leaves two scopes open for the
 *         companion macro to close.
 */
#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,      0); \
            IEM_MC_ARG(uint8_t,         u8Src,       1); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
178
/**
 * Lock-prefix tail for IEMOP_BODY_BINARY_rm_r8_RW/RO when the instruction
 * does not support LOCK: raises \#UD.  Closes the two scopes left open by
 * the RW/RO body macro.
 */
#define IEMOP_BODY_BINARY_rm_r8_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
185
/**
 * Lock-prefix tail for IEMOP_BODY_BINARY_rm_r8_RW: performs the operation
 * via the locked worker on a read-write mapped byte.  Closes the two scopes
 * left open by the RW body macro.
 *
 * @param   a_fnLockedU8    The locked assembly worker, called as
 *                          (pu8Dst, u8Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_rm_r8_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bMapInfoDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bMapInfoDst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bMapInfoDst); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
208
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination.
 *
 * @param   a_fnNormalU8    The assembly worker, called as
 *                          (pu8Dst, u8Src, pEFlags).
 *
 * Note!   The destination is a register, so no LOCK prefix is permitted and
 *         the macro is self-contained (no companion macro needed).
 */
#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        IEM_MC_BEGIN(3, 1, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
257
258
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * @param   a_fnNormalU16   Worker for 16-bit operand size (pu16Dst, u16Src, pEFlags).
 * @param   a_fnNormalU32   Worker for 32-bit operand size (pu32Dst, u32Src, pEFlags).
 * @param   a_fnNormalU64   Worker for 64-bit operand size (pu64Dst, u64Src, pEFlags).
 *
 * Note!   Leaves two scopes open for IEMOP_BODY_BINARY_rm_rv_LOCKED to close.
 */
#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                /* 32-bit writes zero the upper half of the 64-bit register. */ \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * Lock-prefix tail for IEMOP_BODY_BINARY_rm_rv_RW: performs the operation via
 * the locked workers on a read-write mapped operand.  Closes the two scopes
 * left open by the RW body macro.
 *
 * @param   a_fnLockedU16   Locked worker for 16-bit operand size.
 * @param   a_fnLockedU32   Locked worker for 32-bit operand size.
 * @param   a_fnLockedU64   Locked worker for 64-bit operand size.
 */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
477
/**
 * Body for read-only word/dword/qword instructions like TEST and CMP with
 * memory/register as the destination.
 *
 * @param   a_fnNormalU16   Worker for 16-bit operand size (pu16Dst, u16Src, pEFlags).
 * @param   a_fnNormalU32   Worker for 32-bit operand size (pu32Dst, u32Src, pEFlags).
 * @param   a_fnNormalU64   Worker for 64-bit operand size (pu64Dst, u64Src, pEFlags).
 *
 * Note!   Unlike the RW variant this macro is self-contained: the destination
 *         is never written, so a LOCK prefix raises \#UD directly in the final
 *         'else' branch.
 */
#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst,    0); \
                    IEM_MC_ARG(uint16_t,         u16Src,     1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst,    0); \
                    IEM_MC_ARG(uint32_t,         u32Src,     1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst,    0); \
                    IEM_MC_ARG(uint64_t,         u64Src,     1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* LOCK with a read-only destination is invalid -> \#UD. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
627
628
/**
 * Body for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * @param   a_fnNormalU8    The assembly worker, called as
 *                          (pu8Dst, u8Src, pEFlags) with pu8Dst = &AL.
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    \
    IEM_MC_BEGIN(3, 0, 0, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1); \
    IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
    \
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
    \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
648
/**
 * Body for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * @param   a_fnNormalU16       Worker for 16-bit operand size.
 * @param   a_fnNormalU32       Worker for 32-bit operand size.
 * @param   a_fnNormalU64       Worker for 64-bit operand size (Iz is
 *                              sign-extended from 32 bits).
 * @param   a_fModifiesDstReg   Non-zero if the worker writes the destination
 *                              (gates the high-dword clearing in 32-bit mode;
 *                              pass 0 for TEST/CMP style instructions).
 *
 * NOTE(review): the switch cases have no 'break' after IEM_MC_END();
 *               presumably the MC block terminates the case itself — confirm
 *               against the IEM_MC_END definition before relying on it.
 */
#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            \
            IEM_MC_BEGIN(3, 0, 0, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
            \
            if (a_fModifiesDstReg) \
                IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            /* Iz is at most 32 bits wide; sign-extend it to 64 bits. */ \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
715
716
717
718/* Instruction specification format - work in progress: */
719
720/**
721 * @opcode 0x00
722 * @opmnemonic add
723 * @op1 rm:Eb
724 * @op2 reg:Gb
725 * @opmaps one
726 * @openc ModR/M
727 * @opflmodify cf,pf,af,zf,sf,of
728 * @ophints harmless ignores_op_sizes
729 * @opstats add_Eb_Gb
730 * @opgroup og_gen_arith_bin
731 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
732 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
733 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
734 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
735 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* 0x00: ADD Eb,Gb - byte add, r/m destination, LOCK allowed. */
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_add_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_add_u8_locked);
}
742
743
744/**
745 * @opcode 0x01
746 * @opgroup og_gen_arith_bin
747 * @opflmodify cf,pf,af,zf,sf,of
748 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
749 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
750 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
751 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
752 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    /* 0x01: ADD Ev,Gv - word/dword/qword add, r/m destination, LOCK allowed. */
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
759
760
761/**
762 * @opcode 0x02
763 * @opgroup og_gen_arith_bin
764 * @opflmodify cf,pf,af,zf,sf,of
765 * @opcopytests iemOp_add_Eb_Gb
766 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* 0x02: ADD Gb,Eb - byte add, register destination. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
}
772
773
774/**
775 * @opcode 0x03
776 * @opgroup og_gen_arith_bin
777 * @opflmodify cf,pf,af,zf,sf,of
778 * @opcopytests iemOp_add_Ev_Gv
779 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* 0x03: ADD Gv,Ev - word/dword/qword add, register destination. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1, 0);
}
785
786
787/**
788 * @opcode 0x04
789 * @opgroup og_gen_arith_bin
790 * @opflmodify cf,pf,af,zf,sf,of
791 * @opcopytests iemOp_add_Eb_Gb
792 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* 0x04: ADD AL,Ib - add byte immediate to AL. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
}
798
799
800/**
801 * @opcode 0x05
802 * @opgroup og_gen_arith_bin
803 * @opflmodify cf,pf,af,zf,sf,of
804 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
805 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
806 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
807 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
808 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* 0x05: ADD rAX,Iz - add immediate to AX/EAX/RAX; final '1' = modifies destination. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
814
815
816/**
817 * @opcode 0x06
818 * @opgroup og_stack_sreg
819 */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* 0x06: PUSH ES - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
826
827
828/**
829 * @opcode 0x07
830 * @opgroup og_stack_sreg
831 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* 0x07: POP ES - invalid in 64-bit mode; deferred to C implementation
       because loading a segment register can change execution mode.  The
       register mask tells the native recompiler which guest registers the
       C implementation may dirty (xSP plus the ES selector/base/limit). */
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst      + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst   + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst  + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES),
                                iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
844
845
846/**
847 * @opcode 0x08
848 * @opgroup og_gen_arith_bin
849 * @opflmodify cf,pf,af,zf,sf,of
850 * @opflundef af
851 * @opflclear of,cf
852 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
853 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
854 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
855 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
856 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    /* 0x08: OR Eb,Gb - byte OR, r/m destination, LOCK allowed; AF undefined. */
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_or_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_or_u8_locked);
}
864
865
/**
867 * @opcode 0x09
868 * @opgroup og_gen_arith_bin
869 * @opflmodify cf,pf,af,zf,sf,of
870 * @opflundef af
871 * @opflclear of,cf
872 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
873 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
874 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
875 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
876 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
877 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
878 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    /* 0x09: OR Ev,Gv - word/dword/qword OR, r/m destination, LOCK allowed. */
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
886
887
888/**
889 * @opcode 0x0a
890 * @opgroup og_gen_arith_bin
891 * @opflmodify cf,pf,af,zf,sf,of
892 * @opflundef af
893 * @opflclear of,cf
894 * @opcopytests iemOp_or_Eb_Gb
895 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    /* 0x0a: OR Gb,Eb - byte OR, register destination. */
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
}
902
903
904/**
905 * @opcode 0x0b
906 * @opgroup og_gen_arith_bin
907 * @opflmodify cf,pf,af,zf,sf,of
908 * @opflundef af
909 * @opflclear of,cf
910 * @opcopytests iemOp_or_Ev_Gv
911 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* 0x0b: OR Gv,Ev - word/dword/qword OR, register destination. */
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1, 0);
}
918
919
920/**
921 * @opcode 0x0c
922 * @opgroup og_gen_arith_bin
923 * @opflmodify cf,pf,af,zf,sf,of
924 * @opflundef af
925 * @opflclear of,cf
926 * @opcopytests iemOp_or_Eb_Gb
927 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* 0x0c: OR AL,Ib - OR byte immediate into AL. */
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
}
934
935
/**
 * @opcode 0x0d
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* AF is architecturally undefined after OR. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Immediate form against rAX; last arg selects sign-extension of Iz for 64-bit. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
956
957
/**
 * @opcode 0x0e
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    /* Opcode 0x0e is invalid in 64-bit mode (reused as 2-byte escape territory). */
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
968
969
/**
 * @opcode 0x0f
 * @opmnemonic EscTwo0f
 * @openc two0f
 * @opdisenum OP_2B_ESC
 * @ophints harmless
 * @opgroup og_escapes
 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    /* 0x0f is the two-byte opcode escape on 286 and later. */
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        /* Four table entries per opcode byte, selected by the operand prefix. */
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_CS),
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
1015
/**
 * @opcode 0x10
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Memory-destination form: plain variant plus a LOCKed variant. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_adc_u8_locked);
}
1033
1034
/**
 * @opcode 0x11
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Memory-destination form: plain variant plus a LOCKed variant. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1052
1053
/**
 * @opcode 0x12
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Eb_Gb
 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register-destination form; no LOCK variant. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
1066
1067
/**
 * @opcode 0x13
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Ev_Gv
 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* Register-destination form; no LOCK variant. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1, 0);
}
1080
1081
/**
 * @opcode 0x14
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Eb_Gb
 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
1094
1095
/**
 * @opcode 0x15
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Ev_Gv
 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* Immediate form against rAX; last arg selects sign-extension of Iz for 64-bit. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1108
1109
/**
 * @opcode 0x16
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    /* Opcode 0x16 is invalid in 64-bit mode. */
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1119
1120
/**
 * @opcode 0x17
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    /* Defer to the C implementation; popping SS inhibits interrupts for one
       instruction (IEM_CIMPL_F_INHIBIT_SHADOW) and may change the mode.
       The register mask lists the guest state the CIMPL may write. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_SS),
                                iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1139
1140
/**
 * @opcode 0x18
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Memory-destination form: plain variant plus a LOCKed variant. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sbb_u8_locked);
}
1153
1154
/**
 * @opcode 0x19
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Memory-destination form: plain variant plus a LOCKed variant. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1167
1168
/**
 * @opcode 0x1a
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register-destination form; no LOCK variant. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
1180
1181
/**
 * @opcode 0x1b
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* Register-destination form; no LOCK variant. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1, 0);
}
1193
1194
/**
 * @opcode 0x1c
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
1206
1207
/**
 * @opcode 0x1d
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* Immediate form against rAX; last arg selects sign-extension of Iz for 64-bit. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1219
1220
/**
 * @opcode 0x1e
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    /* Opcode 0x1e is invalid in 64-bit mode. */
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1231
1232
/**
 * @opcode 0x1f
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    /* Defer to the C implementation; the register mask lists the guest state
       the CIMPL may write (rSP and the DS selector/base/limit). */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS),
                                iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1249
1250
/**
 * @opcode 0x20
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* AF is architecturally undefined after AND. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Memory-destination form: plain variant plus a LOCKed variant. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_and_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_and_u8_locked);
}
1265
1266
/**
 * @opcode 0x21
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* AF is architecturally undefined after AND. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1281
1282
/**
 * @opcode 0x22
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* AF is architecturally undefined after AND. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1296
1297
/**
 * @opcode 0x23
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* AF is architecturally undefined after AND. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1, 0);
}
1311
1312
1313/**
1314 * @opcode 0x24
1315 * @opgroup og_gen_arith_bin
1316 * @opflmodify cf,pf,af,zf,sf,of
1317 * @opflundef af
1318 * @opflclear of,cf
1319 */
1320FNIEMOP_DEF(iemOp_and_Al_Ib)
1321{
1322 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1323 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1324 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1325}
1326
1327
/**
 * @opcode 0x25
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* AF is architecturally undefined after AND. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1341
1342
/**
 * @opcode 0x26
 * @opmnemonic SEG
 * @op1 ES
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* Record the ES segment override and continue decoding with the next byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1361
1362
/**
 * @opcode 0x27
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* OF is architecturally undefined after DAA. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    /* Defer to the C implementation; only rAX (AL) and status flags are written. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_daa);
}
1377
1378
/**
 * @opcode 0x28
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Memory-destination form: plain variant plus a LOCKed variant. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sub_u8_locked);
}
1390
1391
/**
 * @opcode 0x29
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Memory-destination form: plain variant plus a LOCKed variant. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1403
1404
/**
 * @opcode 0x2a
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register-destination form; no LOCK variant. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1415
1416
/**
 * @opcode 0x2b
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* Register-destination form; no LOCK variant. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1, 0);
}
1427
1428
/**
 * @opcode 0x2c
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1439
1440
/**
 * @opcode 0x2d
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* Immediate form against rAX; last arg selects sign-extension of Iz for 64-bit. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1451
1452
/**
 * @opcode 0x2e
 * @opmnemonic SEG
 * @op1 CS
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* Record the CS segment override and continue decoding with the next byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1471
1472
/**
 * @opcode 0x2f
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* OF is architecturally undefined after DAS. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    /* Defer to the C implementation; only rAX (AL) and status flags are written. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_das);
}
1487
1488
/**
 * @opcode 0x30
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* AF is architecturally undefined after XOR. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Memory-destination form: plain variant plus a LOCKed variant. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_xor_u8_locked);
}
1503
1504
/**
 * @opcode 0x31
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* AF is architecturally undefined after XOR. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1519
1520
/**
 * @opcode 0x32
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* AF is architecturally undefined after XOR. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1534
1535
/**
 * @opcode 0x33
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* AF is architecturally undefined after XOR. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1, 0);
}
1549
1550
/**
 * @opcode 0x34
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* AF is architecturally undefined after XOR. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1564
1565
1566/**
1567 * @opcode 0x35
1568 * @opgroup og_gen_arith_bin
1569 * @opflmodify cf,pf,af,zf,sf,of
1570 * @opflundef af
1571 * @opflclear of,cf
1572 */
1573FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1574{
1575 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1576 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1577 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1578}
1579
1580
/**
 * @opcode 0x36
 * @opmnemonic SEG
 * @op1 SS
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* Record the SS segment override and continue decoding with the next byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1599
1600
/**
 * @opcode 0x37
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef pf,zf,sf,of
 * @opgroup og_gen_arith_dec
 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_aaa)
{
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* OF is among the flags left undefined by AAA (see @opflundef above). */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    /* Defer to the C implementation; only rAX (AL/AH) and status flags are written. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aaa);
}
1648
1649
/**
 * @opcode 0x38
 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    /* CMP only reads its operands: read-only body, no LOCK variant. */
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
1659
1660
/**
 * @opcode 0x39
 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    /* CMP only reads its operands: read-only body. */
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
1669
1670
/**
 * @opcode 0x3a
 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1679
1680
/**
 * @opcode 0x3b
 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    /* First 0 = destination register is not written (CMP discards the result). */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0, 0);
}
1689
1690
/**
 * @opcode 0x3c
 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1699
1700
/**
 * @opcode 0x3d
 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    /* Last 0 = rAX is not modified (CMP discards the result). */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1709
1710
/**
 * @opcode 0x3e
 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* Record the DS segment override and continue decoding with the next byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1723
1724
1725/**
1726 * @opcode 0x3f
1727 * @opfltest af,cf
1728 * @opflmodify cf,pf,af,zf,sf,of
1729 * @opflundef pf,zf,sf,of
1730 * @opgroup og_gen_arith_dec
1731 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1732 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1733 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1734 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1735 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1736 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1737 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1738 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1739 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1740 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1741 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1742 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1743 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1744 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1745 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1746 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1747 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1748 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1749 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1750 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1751 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1752 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1753 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1754 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1755 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1756 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1757 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1758 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1759 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1760 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1761 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1762 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1763 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1764 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1765 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1766 */
1767FNIEMOP_DEF(iemOp_aas)
1768{
1769 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1770 IEMOP_HLP_NO_64BIT();
1771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1772 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1773
1774 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aas);
1775}
1776
1777
/**
 * Common 'inc/dec register' helper.
 *
 * Not for 64-bit code, only for what became the rex prefixes
 * (opcodes 0x40..0x4f), hence only the 16-bit and 32-bit operand
 * sizes are handled here.
 *
 * @param   a_fnNormalU16   16-bit assembly worker, called with a pointer to
 *                          the register value and a pointer to EFLAGS.
 * @param   a_fnNormalU32   32-bit assembly worker, same calling convention;
 *                          the high half of the 64-bit register is cleared
 *                          afterwards (IEM_MC_CLEAR_HIGH_GREG_U64).
 * @param   a_iReg          The general purpose register (X86_GREG_xXX).
 */
#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *,      pu16Dst, 0); \
            IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
            IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *,      pu32Dst, 0); \
            IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
            IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
            IEM_MC_CLEAR_HIGH_GREG_U64(a_iReg); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
1813
/**
 * @opcode 0x40
 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        /* Restart decoding with the instruction following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1834
1835
/**
 * @opcode 0x41
 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3; /* REX.B adds 8 to the r/m / base register index. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1857
1858
/**
 * @opcode 0x42
 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X adds 8 to the SIB index register. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1880
1881
1882
/**
 * @opcode 0x43
 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1905
1906
/**
 * @opcode 0x44
 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R adds 8 to the ModRM reg field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1928
1929
/**
 * @opcode 0x45
 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1952
1953
1954/**
1955 * @opcode 0x46
1956 */
1957FNIEMOP_DEF(iemOp_inc_eSI)
1958{
1959 /*
1960 * This is a REX prefix in 64-bit mode.
1961 */
1962 if (IEM_IS_64BIT_CODE(pVCpu))
1963 {
1964 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
1965 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
1966 pVCpu->iem.s.uRexReg = 1 << 3;
1967 pVCpu->iem.s.uRexIndex = 1 << 3;
1968
1969 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1970 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1971 }
1972
1973 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
1974 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
1975}
1976
1977
1978/**
1979 * @opcode 0x47
1980 */
1981FNIEMOP_DEF(iemOp_inc_eDI)
1982{
1983 /*
1984 * This is a REX prefix in 64-bit mode.
1985 */
1986 if (IEM_IS_64BIT_CODE(pVCpu))
1987 {
1988 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
1989 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
1990 pVCpu->iem.s.uRexReg = 1 << 3;
1991 pVCpu->iem.s.uRexB = 1 << 3;
1992 pVCpu->iem.s.uRexIndex = 1 << 3;
1993
1994 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1995 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1996 }
1997
1998 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
1999 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
2000}
2001
2002
2003/**
2004 * @opcode 0x48
2005 */
2006FNIEMOP_DEF(iemOp_dec_eAX)
2007{
2008 /*
2009 * This is a REX prefix in 64-bit mode.
2010 */
2011 if (IEM_IS_64BIT_CODE(pVCpu))
2012 {
2013 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
2014 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
2015 iemRecalEffOpSize(pVCpu);
2016
2017 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2018 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2019 }
2020
2021 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
2022 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
2023}
2024
2025
2026/**
2027 * @opcode 0x49
2028 */
2029FNIEMOP_DEF(iemOp_dec_eCX)
2030{
2031 /*
2032 * This is a REX prefix in 64-bit mode.
2033 */
2034 if (IEM_IS_64BIT_CODE(pVCpu))
2035 {
2036 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
2037 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2038 pVCpu->iem.s.uRexB = 1 << 3;
2039 iemRecalEffOpSize(pVCpu);
2040
2041 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2042 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2043 }
2044
2045 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
2046 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
2047}
2048
2049
2050/**
2051 * @opcode 0x4a
2052 */
2053FNIEMOP_DEF(iemOp_dec_eDX)
2054{
2055 /*
2056 * This is a REX prefix in 64-bit mode.
2057 */
2058 if (IEM_IS_64BIT_CODE(pVCpu))
2059 {
2060 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
2061 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2062 pVCpu->iem.s.uRexIndex = 1 << 3;
2063 iemRecalEffOpSize(pVCpu);
2064
2065 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2066 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2067 }
2068
2069 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
2070 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
2071}
2072
2073
2074/**
2075 * @opcode 0x4b
2076 */
2077FNIEMOP_DEF(iemOp_dec_eBX)
2078{
2079 /*
2080 * This is a REX prefix in 64-bit mode.
2081 */
2082 if (IEM_IS_64BIT_CODE(pVCpu))
2083 {
2084 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
2085 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2086 pVCpu->iem.s.uRexB = 1 << 3;
2087 pVCpu->iem.s.uRexIndex = 1 << 3;
2088 iemRecalEffOpSize(pVCpu);
2089
2090 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2091 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2092 }
2093
2094 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
2095 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
2096}
2097
2098
2099/**
2100 * @opcode 0x4c
2101 */
2102FNIEMOP_DEF(iemOp_dec_eSP)
2103{
2104 /*
2105 * This is a REX prefix in 64-bit mode.
2106 */
2107 if (IEM_IS_64BIT_CODE(pVCpu))
2108 {
2109 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
2110 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
2111 pVCpu->iem.s.uRexReg = 1 << 3;
2112 iemRecalEffOpSize(pVCpu);
2113
2114 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2115 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2116 }
2117
2118 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
2119 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
2120}
2121
2122
2123/**
2124 * @opcode 0x4d
2125 */
2126FNIEMOP_DEF(iemOp_dec_eBP)
2127{
2128 /*
2129 * This is a REX prefix in 64-bit mode.
2130 */
2131 if (IEM_IS_64BIT_CODE(pVCpu))
2132 {
2133 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
2134 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2135 pVCpu->iem.s.uRexReg = 1 << 3;
2136 pVCpu->iem.s.uRexB = 1 << 3;
2137 iemRecalEffOpSize(pVCpu);
2138
2139 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2140 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2141 }
2142
2143 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
2144 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
2145}
2146
2147
2148/**
2149 * @opcode 0x4e
2150 */
2151FNIEMOP_DEF(iemOp_dec_eSI)
2152{
2153 /*
2154 * This is a REX prefix in 64-bit mode.
2155 */
2156 if (IEM_IS_64BIT_CODE(pVCpu))
2157 {
2158 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
2159 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2160 pVCpu->iem.s.uRexReg = 1 << 3;
2161 pVCpu->iem.s.uRexIndex = 1 << 3;
2162 iemRecalEffOpSize(pVCpu);
2163
2164 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2165 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2166 }
2167
2168 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
2169 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
2170}
2171
2172
2173/**
2174 * @opcode 0x4f
2175 */
2176FNIEMOP_DEF(iemOp_dec_eDI)
2177{
2178 /*
2179 * This is a REX prefix in 64-bit mode.
2180 */
2181 if (IEM_IS_64BIT_CODE(pVCpu))
2182 {
2183 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
2184 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2185 pVCpu->iem.s.uRexReg = 1 << 3;
2186 pVCpu->iem.s.uRexB = 1 << 3;
2187 pVCpu->iem.s.uRexIndex = 1 << 3;
2188 iemRecalEffOpSize(pVCpu);
2189
2190 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2191 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2192 }
2193
2194 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
2195 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
2196}
2197
2198
2199/**
2200 * Common 'push register' helper.
2201 */
2202FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
2203{
2204 if (IEM_IS_64BIT_CODE(pVCpu))
2205 {
2206 iReg |= pVCpu->iem.s.uRexB;
2207 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2208 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2209 }
2210
2211 switch (pVCpu->iem.s.enmEffOpSize)
2212 {
2213 case IEMMODE_16BIT:
2214 IEM_MC_BEGIN(0, 1, 0, 0);
2215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2216 IEM_MC_LOCAL(uint16_t, u16Value);
2217 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
2218 IEM_MC_PUSH_U16(u16Value);
2219 IEM_MC_ADVANCE_RIP_AND_FINISH();
2220 IEM_MC_END();
2221 break;
2222
2223 case IEMMODE_32BIT:
2224 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2226 IEM_MC_LOCAL(uint32_t, u32Value);
2227 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
2228 IEM_MC_PUSH_U32(u32Value);
2229 IEM_MC_ADVANCE_RIP_AND_FINISH();
2230 IEM_MC_END();
2231 break;
2232
2233 case IEMMODE_64BIT:
2234 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2236 IEM_MC_LOCAL(uint64_t, u64Value);
2237 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
2238 IEM_MC_PUSH_U64(u64Value);
2239 IEM_MC_ADVANCE_RIP_AND_FINISH();
2240 IEM_MC_END();
2241 break;
2242
2243 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2244 }
2245}
2246
2247
2248/**
2249 * @opcode 0x50
2250 */
2251FNIEMOP_DEF(iemOp_push_eAX)
2252{
2253 IEMOP_MNEMONIC(push_rAX, "push rAX");
2254 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
2255}
2256
2257
2258/**
2259 * @opcode 0x51
2260 */
2261FNIEMOP_DEF(iemOp_push_eCX)
2262{
2263 IEMOP_MNEMONIC(push_rCX, "push rCX");
2264 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
2265}
2266
2267
2268/**
2269 * @opcode 0x52
2270 */
2271FNIEMOP_DEF(iemOp_push_eDX)
2272{
2273 IEMOP_MNEMONIC(push_rDX, "push rDX");
2274 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
2275}
2276
2277
2278/**
2279 * @opcode 0x53
2280 */
2281FNIEMOP_DEF(iemOp_push_eBX)
2282{
2283 IEMOP_MNEMONIC(push_rBX, "push rBX");
2284 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
2285}
2286
2287
2288/**
2289 * @opcode 0x54
2290 */
2291FNIEMOP_DEF(iemOp_push_eSP)
2292{
2293 IEMOP_MNEMONIC(push_rSP, "push rSP");
2294 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
2295 {
2296 IEM_MC_BEGIN(0, 1, IEM_MC_F_ONLY_8086, 0);
2297 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2298 IEM_MC_LOCAL(uint16_t, u16Value);
2299 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
2300 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
2301 IEM_MC_PUSH_U16(u16Value);
2302 IEM_MC_ADVANCE_RIP_AND_FINISH();
2303 IEM_MC_END();
2304 }
2305 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
2306}
2307
2308
2309/**
2310 * @opcode 0x55
2311 */
2312FNIEMOP_DEF(iemOp_push_eBP)
2313{
2314 IEMOP_MNEMONIC(push_rBP, "push rBP");
2315 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
2316}
2317
2318
2319/**
2320 * @opcode 0x56
2321 */
2322FNIEMOP_DEF(iemOp_push_eSI)
2323{
2324 IEMOP_MNEMONIC(push_rSI, "push rSI");
2325 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
2326}
2327
2328
2329/**
2330 * @opcode 0x57
2331 */
2332FNIEMOP_DEF(iemOp_push_eDI)
2333{
2334 IEMOP_MNEMONIC(push_rDI, "push rDI");
2335 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
2336}
2337
2338
2339/**
2340 * Common 'pop register' helper.
2341 */
2342FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
2343{
2344 if (IEM_IS_64BIT_CODE(pVCpu))
2345 {
2346 iReg |= pVCpu->iem.s.uRexB;
2347 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2348 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2349 }
2350
2351 switch (pVCpu->iem.s.enmEffOpSize)
2352 {
2353 case IEMMODE_16BIT:
2354 IEM_MC_BEGIN(0, 1, 0, 0);
2355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2356 IEM_MC_LOCAL(uint16_t *, pu16Dst);
2357 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
2358 IEM_MC_POP_U16(pu16Dst);
2359 IEM_MC_ADVANCE_RIP_AND_FINISH();
2360 IEM_MC_END();
2361 break;
2362
2363 case IEMMODE_32BIT:
2364 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2366 IEM_MC_LOCAL(uint32_t *, pu32Dst);
2367 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
2368 IEM_MC_POP_U32(pu32Dst);
2369 IEM_MC_CLEAR_HIGH_GREG_U64(iReg); /** @todo testcase*/
2370 IEM_MC_ADVANCE_RIP_AND_FINISH();
2371 IEM_MC_END();
2372 break;
2373
2374 case IEMMODE_64BIT:
2375 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2377 IEM_MC_LOCAL(uint64_t *, pu64Dst);
2378 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
2379 IEM_MC_POP_U64(pu64Dst);
2380 IEM_MC_ADVANCE_RIP_AND_FINISH();
2381 IEM_MC_END();
2382 break;
2383
2384 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2385 }
2386}
2387
2388
2389/**
2390 * @opcode 0x58
2391 */
2392FNIEMOP_DEF(iemOp_pop_eAX)
2393{
2394 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
2395 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
2396}
2397
2398
2399/**
2400 * @opcode 0x59
2401 */
2402FNIEMOP_DEF(iemOp_pop_eCX)
2403{
2404 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
2405 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
2406}
2407
2408
2409/**
2410 * @opcode 0x5a
2411 */
2412FNIEMOP_DEF(iemOp_pop_eDX)
2413{
2414 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
2415 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
2416}
2417
2418
2419/**
2420 * @opcode 0x5b
2421 */
2422FNIEMOP_DEF(iemOp_pop_eBX)
2423{
2424 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
2425 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
2426}
2427
2428
2429/**
2430 * @opcode 0x5c
2431 */
2432FNIEMOP_DEF(iemOp_pop_eSP)
2433{
2434 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
2435 if (IEM_IS_64BIT_CODE(pVCpu))
2436 {
2437 if (pVCpu->iem.s.uRexB)
2438 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
2439 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2440 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2441 }
2442
2443 /** @todo add testcase for this instruction. */
2444 switch (pVCpu->iem.s.enmEffOpSize)
2445 {
2446 case IEMMODE_16BIT:
2447 IEM_MC_BEGIN(0, 2, 0, 0);
2448 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2449 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2450 IEM_MC_LOCAL(uint16_t, u16Dst);
2451 IEM_MC_LOCAL(uint16_t *, pu16Dst);
2452 IEM_MC_REF_LOCAL(pu16Dst, u16Dst);
2453 IEM_MC_POP_U16(pu16Dst); /** @todo not correct MC, fix later. */
2454 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
2455 IEM_MC_ADVANCE_RIP_AND_FINISH();
2456 IEM_MC_END();
2457 break;
2458
2459 case IEMMODE_32BIT:
2460 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
2461 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2462 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2463 IEM_MC_LOCAL(uint32_t, u32Dst);
2464 IEM_MC_LOCAL(uint32_t *, pu32Dst);
2465 IEM_MC_REF_LOCAL(pu32Dst, u32Dst);
2466 IEM_MC_POP_U32(pu32Dst);
2467 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
2468 IEM_MC_ADVANCE_RIP_AND_FINISH();
2469 IEM_MC_END();
2470 break;
2471
2472 case IEMMODE_64BIT:
2473 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
2474 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2475 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2476 IEM_MC_LOCAL(uint64_t, u64Dst);
2477 IEM_MC_LOCAL(uint64_t *, pu64Dst);
2478 IEM_MC_REF_LOCAL(pu64Dst, u64Dst);
2479 IEM_MC_POP_U64(pu64Dst);
2480 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
2481 IEM_MC_ADVANCE_RIP_AND_FINISH();
2482 IEM_MC_END();
2483 break;
2484
2485 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2486 }
2487}
2488
2489
2490/**
2491 * @opcode 0x5d
2492 */
2493FNIEMOP_DEF(iemOp_pop_eBP)
2494{
2495 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
2496 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
2497}
2498
2499
2500/**
2501 * @opcode 0x5e
2502 */
2503FNIEMOP_DEF(iemOp_pop_eSI)
2504{
2505 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
2506 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
2507}
2508
2509
2510/**
2511 * @opcode 0x5f
2512 */
2513FNIEMOP_DEF(iemOp_pop_eDI)
2514{
2515 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
2516 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
2517}
2518
2519
2520/**
2521 * @opcode 0x60
2522 */
2523FNIEMOP_DEF(iemOp_pusha)
2524{
2525 IEMOP_MNEMONIC(pusha, "pusha");
2526 IEMOP_HLP_MIN_186();
2527 IEMOP_HLP_NO_64BIT();
2528 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2529 IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_16);
2530 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2531 IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_32);
2532}
2533
2534
2535/**
2536 * @opcode 0x61
2537 */
2538FNIEMOP_DEF(iemOp_popa__mvex)
2539{
2540 if (!IEM_IS_64BIT_CODE(pVCpu))
2541 {
2542 IEMOP_MNEMONIC(popa, "popa");
2543 IEMOP_HLP_MIN_186();
2544 IEMOP_HLP_NO_64BIT();
2545 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2546 IEM_MC_DEFER_TO_CIMPL_0_RET(0,
2547 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
2548 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
2549 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
2550 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
2551 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
2552 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
2553 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
2554 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
2555 iemCImpl_popa_16);
2556 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2557 IEM_MC_DEFER_TO_CIMPL_0_RET(0,
2558 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
2559 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
2560 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
2561 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
2562 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
2563 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
2564 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
2565 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
2566 iemCImpl_popa_32);
2567 }
2568 IEMOP_MNEMONIC(mvex, "mvex");
2569 Log(("mvex prefix is not supported!\n"));
2570 IEMOP_RAISE_INVALID_OPCODE_RET();
2571}
2572
2573
2574/**
2575 * @opcode 0x62
2576 * @opmnemonic bound
2577 * @op1 Gv_RO
2578 * @op2 Ma
2579 * @opmincpu 80186
2580 * @ophints harmless x86_invalid_64
2581 * @optest op1=0 op2=0 ->
2582 * @optest op1=1 op2=0 -> value.xcpt=5
2583 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2584 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2585 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2586 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2587 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2588 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2589 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2590 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2591 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2592 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2593 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2594 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2595 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2596 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2597 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2598 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2599 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2600 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2601 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2602 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2603 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2604 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2605 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2606 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2607 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2608 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2609 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2610 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2611 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2612 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2613 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2614 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2615 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2616 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2617 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2618 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2619 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2620 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2621 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2622 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2623 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2624 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2625 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set.  In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X.  Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /** @todo testcase: check that there are two memory accesses involved. Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                /* 16-bit: bounds are two consecutive words at [mem] and [mem+2]. */
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                /* 32-bit: bounds are two consecutive dwords at [mem] and [mem+4]. */
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX decoding proper: consume the remaining two payload bytes, then
       bail out since the prefix isn't implemented yet. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2713
2714
/** Opcode 0x63 - non-64-bit modes.
 * ARPL Ew,Gw: adjusts the RPL field of the destination selector; requires
 * protected mode (invalid in real and V86 mode).  The destination may be a
 * register or memory; EFLAGS.ZF is updated by the assembly helper. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
        IEM_MC_ARG(uint16_t,        u16Src,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory: the destination word is mapped read-write, updated by the
           helper and committed together with the resulting EFLAGS. */
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2763
2764
2765/**
2766 * @opcode 0x63
2767 *
2768 * @note This is a weird one. It works like a regular move instruction if
2769 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2770 * @todo This definitely needs a testcase to verify the odd cases. */
2771FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
2772{
2773 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */
2774
2775 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
2776 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2777
2778 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2779 {
2780 if (IEM_IS_MODRM_REG_MODE(bRm))
2781 {
2782 /*
2783 * Register to register.
2784 */
2785 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2787 IEM_MC_LOCAL(uint64_t, u64Value);
2788 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
2789 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2790 IEM_MC_ADVANCE_RIP_AND_FINISH();
2791 IEM_MC_END();
2792 }
2793 else
2794 {
2795 /*
2796 * We're loading a register from memory.
2797 */
2798 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
2799 IEM_MC_LOCAL(uint64_t, u64Value);
2800 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2801 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2803 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2804 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2805 IEM_MC_ADVANCE_RIP_AND_FINISH();
2806 IEM_MC_END();
2807 }
2808 }
2809 else
2810 AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
2811}
2812
2813
2814/**
2815 * @opcode 0x64
2816 * @opmnemonic segfs
2817 * @opmincpu 80386
2818 * @opgroup og_prefixes
2819 */
2820FNIEMOP_DEF(iemOp_seg_FS)
2821{
2822 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
2823 IEMOP_HLP_MIN_386();
2824
2825 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
2826 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
2827
2828 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2829 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2830}
2831
2832
2833/**
2834 * @opcode 0x65
2835 * @opmnemonic seggs
2836 * @opmincpu 80386
2837 * @opgroup og_prefixes
2838 */
2839FNIEMOP_DEF(iemOp_seg_GS)
2840{
2841 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
2842 IEMOP_HLP_MIN_386();
2843
2844 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
2845 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
2846
2847 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2848 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2849}
2850
2851
2852/**
2853 * @opcode 0x66
2854 * @opmnemonic opsize
2855 * @openc prefix
2856 * @opmincpu 80386
2857 * @ophints harmless
2858 * @opgroup og_prefixes
2859 */
2860FNIEMOP_DEF(iemOp_op_size)
2861{
2862 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
2863 IEMOP_HLP_MIN_386();
2864
2865 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
2866 iemRecalEffOpSize(pVCpu);
2867
2868 /* For the 4 entry opcode tables, the operand prefix doesn't not count
2869 when REPZ or REPNZ are present. */
2870 if (pVCpu->iem.s.idxPrefix == 0)
2871 pVCpu->iem.s.idxPrefix = 1;
2872
2873 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2874 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2875}
2876
2877
2878/**
2879 * @opcode 0x67
2880 * @opmnemonic addrsize
2881 * @openc prefix
2882 * @opmincpu 80386
2883 * @ophints harmless
2884 * @opgroup og_prefixes
2885 */
2886FNIEMOP_DEF(iemOp_addr_size)
2887{
2888 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
2889 IEMOP_HLP_MIN_386();
2890
2891 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
2892 switch (pVCpu->iem.s.enmDefAddrMode)
2893 {
2894 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2895 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
2896 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2897 default: AssertFailed();
2898 }
2899
2900 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2901 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2902}
2903
2904
2905/**
2906 * @opcode 0x68
2907 */
2908FNIEMOP_DEF(iemOp_push_Iz)
2909{
2910 IEMOP_MNEMONIC(push_Iz, "push Iz");
2911 IEMOP_HLP_MIN_186();
2912 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2913 switch (pVCpu->iem.s.enmEffOpSize)
2914 {
2915 case IEMMODE_16BIT:
2916 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
2917 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2918 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2919 IEM_MC_LOCAL_CONST(uint16_t, u16Value, u16Imm);
2920 IEM_MC_PUSH_U16(u16Value);
2921 IEM_MC_ADVANCE_RIP_AND_FINISH();
2922 IEM_MC_END();
2923 break;
2924
2925 case IEMMODE_32BIT:
2926 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2927 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2929 IEM_MC_LOCAL_CONST(uint32_t, u32Value, u32Imm);
2930 IEM_MC_PUSH_U32(u32Value);
2931 IEM_MC_ADVANCE_RIP_AND_FINISH();
2932 IEM_MC_END();
2933 break;
2934
2935 case IEMMODE_64BIT:
2936 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2937 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2939 IEM_MC_LOCAL_CONST(uint64_t, u64Value, u64Imm);
2940 IEM_MC_PUSH_U64(u64Value);
2941 IEM_MC_ADVANCE_RIP_AND_FINISH();
2942 IEM_MC_END();
2943 break;
2944
2945 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2946 }
2947}
2948
2949
/**
 * @opcode 0x69
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    /*
     * Three-operand signed multiply: Gv = Ev * Iz, where Iz is an immediate
     * of the effective operand size (imm32 sign-extended in 64-bit mode).
     * SF, ZF, AF and PF are left undefined by the hardware, which is what
     * the IEMOP_VERIFICATION_UNDEFINED_EFLAGS call below records.
     */
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186(); /* instruction first appeared on the 80186 */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            /* Pick the EFLAGS-behavior variant matching the target CPU vendor. */
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Last arg = number of immediate bytes still following the
                   ModR/M bytes (2 here); presumably needed for correct
                   RIP-relative displacement handling - confirm in the macro. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 imm32 bytes follow */

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                /* The imm32 is sign-extended to 64 bits during the opcode fetch. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 imm32 bytes follow */

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();          /* parameter count for the threaded function for this block. */

                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int32_t)u32Imm, 1); /* sign-extend imm32 here instead */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3108
3109
/**
 * @opcode 0x6a
 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    /*
     * Push a sign-extended byte immediate; the width of the stack write
     * follows the effective operand size.  In 64-bit mode the default
     * operand size is 64-bit and 32-bit pushes do not exist (note the
     * IEM_MC_F_NOT_64BIT flag on the 32-bit variant).
     */
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186(); /* instruction first appeared on the 80186 */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_186, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint16_t, uValue, (int16_t)i8Imm); /* sign-extend to 16 bits */
            IEM_MC_PUSH_U16(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint32_t, uValue, (int32_t)i8Imm); /* sign-extend to 32 bits */
            IEM_MC_PUSH_U32(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint64_t, uValue, (int64_t)i8Imm); /* sign-extend to 64 bits */
            IEM_MC_PUSH_U64(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3149
3150
/**
 * @opcode 0x6b
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    /*
     * Three-operand signed multiply with a byte immediate that is
     * sign-extended to the effective operand size: Gv = Ev * Ib.
     * SF, ZF, AF and PF are left undefined by the hardware.
     */
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
    IEMOP_HLP_MIN_186(); /* instruction first appeared on the 80186 */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            /* Pick the EFLAGS-behavior variant matching the target CPU vendor. */
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend the byte imm */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);

                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 imm8 byte follows */

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend the byte imm */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 imm8 byte follows */

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1); /* sign-extend the byte imm */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 imm8 byte follows */

                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the threaded parameter count. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1); /* sign-extend here instead */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3312
3313
/**
 * @opcode 0x6c
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    /*
     * INS/INSB - input byte(s) from port DX to the ES:[e/rDI] string
     * destination.  The actual work is deferred to a C implementation
     * selected by address size; either REP prefix (F2 or F3) picks the
     * repeating variant.  The register mask passed along names the guest
     * GPRs the C code updates (xDI, plus xCX for the REP forms); the
     * trailing 'false' is forwarded as-is to the cimpl (looks like an
     * "I/O access already checked" flag - confirm in the cimpl).
     */
    IEMOP_HLP_MIN_186(); /* instruction first appeared on the 80186 */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3365
3366
/**
 * @opcode 0x6d
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    /*
     * INS/INSW/INSD - input word/dword(s) from port DX to ES:[e/rDI].
     * Dispatch on operand size then address size; a 64-bit operand size is
     * handled identically to 32-bit (no 64-bit I/O string ops - note the
     * case fall-through below).  The register mask names the guest GPRs
     * the deferred C implementation updates (xDI, plus xCX when repeating).
     */
    IEMOP_HLP_MIN_186(); /* instruction first appeared on the 80186 */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* treated as 32-bit - no 64-bit I/O string ops */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* treated as 32-bit - no 64-bit I/O string ops */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3471
3472
/**
 * @opcode 0x6e
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    /*
     * OUTS/OUTSB - output byte(s) from the [rSI] string source to port DX.
     * The effective source segment (DS by default, overridable by a segment
     * prefix) is passed to the deferred C implementation via iEffSeg.
     * Either REP prefix selects the repeating variant; the register mask
     * names the guest GPRs the cimpl updates (xSI, plus xCX when repeating).
     */
    IEMOP_HLP_MIN_186(); /* instruction first appeared on the 80186 */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3524
3525
/**
 * @opcode 0x6f
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    /*
     * OUTS/OUTSW/OUTSD - output word/dword(s) from the [rSI] string source
     * to port DX.  Dispatch on operand size then address size; a 64-bit
     * operand size is handled identically to 32-bit (no 64-bit I/O string
     * ops - note the case fall-through below).  The effective source
     * segment is forwarded via iEffSeg; the register mask names the guest
     * GPRs the deferred C implementation updates (xSI, plus xCX when
     * repeating).
     */
    IEMOP_HLP_MIN_186(); /* instruction first appeared on the 80186 */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* treated as 32-bit - no 64-bit I/O string ops */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* treated as 32-bit - no 64-bit I/O string ops */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3630
3631
/**
 * @opcode 0x70
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    /* JO rel8 - conditional near jump, taken when OF is set. */
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3650
3651
/**
 * @opcode 0x71
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    /* JNO rel8 - conditional near jump, taken when OF is clear
       (hence the swapped branches vs. JO). */
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3670
/**
 * @opcode 0x72
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    /* JC/JB/JNAE rel8 - conditional near jump, taken when CF is set. */
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3689
3690
/**
 * @opcode 0x73
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    /* JNC/JNB/JAE rel8 - conditional near jump, taken when CF is clear. */
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3709
3710
/**
 * @opcode 0x74
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    /* JE/JZ rel8 - conditional near jump, taken when ZF is set. */
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3729
3730
/**
 * @opcode 0x75
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    /* JNE/JNZ rel8 - conditional near jump, taken when ZF is clear. */
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3749
3750
/**
 * @opcode 0x76
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    /* JBE/JNA rel8 - conditional near jump, taken when CF=1 or ZF=1
       (unsigned below-or-equal). */
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3769
3770
/**
 * @opcode 0x77
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    /* JA/JNBE rel8 - conditional near jump, taken when CF=0 and ZF=0
       (unsigned above). */
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3789
3790
/**
 * @opcode 0x78
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    /* JS rel8 - conditional near jump, taken when SF is set. */
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3809
3810
/**
 * @opcode 0x79
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    /* JNS rel8 - conditional near jump, taken when SF is clear. */
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3829
3830
/**
 * @opcode 0x7a
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    /* JP/JPE rel8 - conditional near jump, taken when PF is set. */
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3849
3850
/**
 * @opcode 0x7b
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    /* JNP/JPO rel8 - conditional near jump, taken when PF is clear. */
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3869
3870
/**
 * @opcode 0x7c
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    /* JL/JNGE rel8 - conditional near jump, taken when SF != OF
       (signed less-than). */
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3889
3890
/**
 * @opcode 0x7d
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    /* JNL/JGE rel8 - conditional near jump, taken when SF == OF
       (signed greater-or-equal). */
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3909
3910
/**
 * @opcode 0x7e
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    /* JLE/JNG rel8 - conditional near jump, taken when ZF=1 or SF != OF
       (signed less-or-equal). */
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3929
3930
/**
 * @opcode 0x7f
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    /* JG/JNLE rel8 - conditional near jump, taken when ZF=0 and SF == OF
       (signed greater-than). */
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3949
3950
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Eb_Ib_80.
 *
 * Handles the register-target case and the unlocked memory-target case
 * (read-modify-write).  Note that the expansion deliberately ends inside an
 * open 'else' block for the LOCK-prefixed memory case; the instruction body
 * must be completed with IEMOP_BODY_BINARY_Eb_Ib_LOCKED, which supplies the
 * interlocked variant and closes the braces.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm, 1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            /* plain (unlocked) read-modify-write of the memory operand */ \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 imm8 byte follows */ \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
4001
/**
 * LOCK-prefixed memory-target tail for IEMOP_BODY_BINARY_Eb_Ib_RW; closes the
 * braces the RW macro leaves open and invokes the atomic (a_fnLockedU8)
 * worker instead of the normal one.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
4025
/**
 * Read-only variant of IEMOP_BODY_BINARY_Eb_Ib_RW for workers that do not
 * write the destination (cmp): the memory operand is mapped RO.  The LOCK
 * prefixed case again falls into a trailing open 'else' that must be closed
 * with IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK().
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
4072
/**
 * Tail for IEMOP_BODY_BINARY_Eb_Ib_RO when the LOCK prefix is invalid:
 * finishes decoding and raises \#UD, closing the braces left open above.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4079
4080
4081
4082/**
4083 * @opmaps grp1_80,grp1_83
4084 * @opcode /0
4085 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
    /* The two body macros pair up: RW handles register + unlocked memory,
       LOCKED closes it out with the atomic worker for LOCK-prefixed memory. */
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_add_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
}
4092
4093
4094/**
4095 * @opmaps grp1_80,grp1_83
4096 * @opcode /1
4097 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
    /* RW body + LOCKED tail, see iemOp_Grp1_add_Eb_Ib. */
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_or_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
}
4104
4105
4106/**
4107 * @opmaps grp1_80,grp1_83
4108 * @opcode /2
4109 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
    /* RW body + LOCKED tail, see iemOp_Grp1_add_Eb_Ib. */
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
}
4116
4117
4118/**
4119 * @opmaps grp1_80,grp1_83
4120 * @opcode /3
4121 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
    /* RW body + LOCKED tail, see iemOp_Grp1_add_Eb_Ib. */
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
}
4128
4129
4130/**
4131 * @opmaps grp1_80,grp1_83
4132 * @opcode /4
4133 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
    /* RW body + LOCKED tail, see iemOp_Grp1_add_Eb_Ib. */
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_and_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
}
4140
4141
4142/**
4143 * @opmaps grp1_80,grp1_83
4144 * @opcode /5
4145 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
    /* RW body + LOCKED tail, see iemOp_Grp1_add_Eb_Ib. */
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
}
4152
4153
4154/**
4155 * @opmaps grp1_80,grp1_83
4156 * @opcode /6
4157 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
    /* RW body + LOCKED tail, see iemOp_Grp1_add_Eb_Ib. */
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
}
4164
4165
4166/**
4167 * @opmaps grp1_80,grp1_83
4168 * @opcode /7
4169 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
    /* cmp never writes the destination: read-only body, and a LOCK prefix
       on the memory form raises \#UD via the NO_LOCK tail. */
    IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
}
4176
4177
4178/**
4179 * @opcode 0x80
4180 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    /* Group 1: the ModR/M reg field selects which arithmetic op to perform. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4197
4198
/**
 * Body for a group 1 binary operator with a word/dword/qword destination and
 * a z-sized immediate (word/dword; dword sign-extended to qword in 64-bit
 * mode via IEM_OPCODE_GET_NEXT_S32_SX_U64).
 *
 * Covers the register target and the unlocked memory target for all three
 * effective operand sizes.  A LOCK-prefixed memory target falls into the
 * trailing open 'else', which must be closed by
 * IEMOP_BODY_BINARY_Ev_Iz_LOCKED() (split for IEMAllInstPython.py).
 */
#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* This must be a separate macro due to parsing restrictions in IEMAllInstPython.py.
   It is the LOCK-prefixed memory-target tail for IEMOP_BODY_BINARY_Ev_Iz_RW:
   same mapping/commit sequence, but dispatching to the atomic workers, and it
   closes the braces the RW macro leaves open. */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4439
/* Read-only version of IEMOP_BODY_BINARY_Ev_Iz_RW for non-writing workers
   (cmp): memory operands are mapped RO, no high-dword clearing is needed in
   the 32-bit register case, and a LOCK prefix on the memory form raises \#UD
   right here (no separate tail macro required). */
#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4596
4597
4598/**
4599 * @opmaps grp1_81
4600 * @opcode /0
4601 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
    /* RW body handles register + unlocked memory; LOCKED tail handles
       LOCK-prefixed memory with the atomic workers. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
4608
4609
4610/**
4611 * @opmaps grp1_81
4612 * @opcode /1
4613 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
    /* RW body + LOCKED tail, see iemOp_Grp1_add_Ev_Iz. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
4620
4621
4622/**
4623 * @opmaps grp1_81
4624 * @opcode /2
4625 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
    /* RW body + LOCKED tail, see iemOp_Grp1_add_Ev_Iz. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
4632
4633
4634/**
4635 * @opmaps grp1_81
4636 * @opcode /3
4637 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
    /* RW body + LOCKED tail, see iemOp_Grp1_add_Ev_Iz. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
4644
4645
4646/**
4647 * @opmaps grp1_81
4648 * @opcode /4
4649 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
    /* RW body + LOCKED tail, see iemOp_Grp1_add_Ev_Iz. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
4656
4657
4658/**
4659 * @opmaps grp1_81
4660 * @opcode /5
4661 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
    /* RW body + LOCKED tail, see iemOp_Grp1_add_Ev_Iz. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
4668
4669
4670/**
4671 * @opmaps grp1_81
4672 * @opcode /6
4673 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
    /* RW body + LOCKED tail, see iemOp_Grp1_add_Ev_Iz. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
4680
4681
4682/**
4683 * @opmaps grp1_81
4684 * @opcode /7
4685 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
    /* cmp never writes the destination; the RO body also rejects a LOCK
       prefix internally, so no separate tail macro is needed here. */
    IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
4691
4692
4693/**
4694 * @opcode 0x81
4695 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    /* Group 1: the ModR/M reg field selects which arithmetic op to perform. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4712
4713
4714/**
4715 * @opcode 0x82
4716 * @opmnemonic grp1_82
4717 * @opgroup og_groups
4718 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    /* Opcode 0x82 behaves as an alias of 0x80; it is only decodable outside
       64-bit mode (raises \#UD there via IEMOP_HLP_NO_64BIT). */
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
4724
4725
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Ev_Ib.
 *
 * The byte immediate is sign-extended to the effective operand size (casts
 * via (int8_t)).  Covers register target and unlocked memory target for all
 * three operand sizes; a LOCK-prefixed memory target falls into the trailing
 * open 'else', which must be closed by IEMOP_BODY_BINARY_Ev_Ib_LOCKED()
 * (split for IEMAllInstPython.py).
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py.
   LOCK-prefixed memory-target tail for IEMOP_BODY_BINARY_Ev_Ib_RW: same
   sequence with the atomic workers, closing the braces left open above. */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4950
/*
 * Read-only variant: decodes and evaluates "op Ev,Ib" (the byte immediate is
 * sign-extended to the effective operand size) without writing the
 * destination back -- only EFLAGS is committed.  Used by CMP (grp1_83 /7).
 * A LOCK prefix on the memory form is rejected, as there is no memory write
 * to make atomic.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
5097
/**
 * @opmaps grp1_83
 * @opcode /0
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
    /* Decode & dispatch: see IEMOP_BODY_BINARY_Ev_Ib_RW / _LOCKED, defined
       earlier in this file (register + plain memory vs LOCKed memory forms). */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
5108
5109
/**
 * @opmaps grp1_83
 * @opcode /1
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
    /* Decode & dispatch: see IEMOP_BODY_BINARY_Ev_Ib_RW / _LOCKED, defined
       earlier in this file (register + plain memory vs LOCKed memory forms). */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
5120
5121
/**
 * @opmaps grp1_83
 * @opcode /2
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
    /* Decode & dispatch: see IEMOP_BODY_BINARY_Ev_Ib_RW / _LOCKED, defined
       earlier in this file (register + plain memory vs LOCKed memory forms). */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
5132
5133
/**
 * @opmaps grp1_83
 * @opcode /3
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
    /* Decode & dispatch: see IEMOP_BODY_BINARY_Ev_Ib_RW / _LOCKED, defined
       earlier in this file (register + plain memory vs LOCKed memory forms). */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
5144
5145
/**
 * @opmaps grp1_83
 * @opcode /4
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
    /* Decode & dispatch: see IEMOP_BODY_BINARY_Ev_Ib_RW / _LOCKED, defined
       earlier in this file (register + plain memory vs LOCKed memory forms). */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
5156
5157
/**
 * @opmaps grp1_83
 * @opcode /5
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
    /* Decode & dispatch: see IEMOP_BODY_BINARY_Ev_Ib_RW / _LOCKED, defined
       earlier in this file (register + plain memory vs LOCKed memory forms). */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
5168
5169
/**
 * @opmaps grp1_83
 * @opcode /6
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
    /* Decode & dispatch: see IEMOP_BODY_BINARY_Ev_Ib_RW / _LOCKED, defined
       earlier in this file (register + plain memory vs LOCKed memory forms). */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
5180
5181
/**
 * @opmaps grp1_83
 * @opcode /7
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
    /* CMP only reads its destination, so the read-only body is used; it
       commits EFLAGS only and rejects the LOCK prefix (see
       IEMOP_BODY_BINARY_Ev_Ib_RO above). */
    IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
5191
5192
/**
 * @opcode 0x83
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
             to the 386 even if absent in the intel reference manuals and some
             3rd party opcode listings. */
    /* Group 1: the ModR/M reg field selects the arithmetic/logical operation;
       the byte immediate is sign-extended to the operand size by the bodies. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5215
5216
/**
 * @opcode 0x84
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by TEST. */
    /* TEST reads both operands without writing back, hence the read-only body
       and the explicit no-LOCK body (macros defined earlier in this file). */
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
5227
5228
/**
 * @opcode 0x85
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by TEST. */
    /* Read-only body: no destination write-back, only EFLAGS is updated
       (macro defined earlier in this file). */
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64);
}
5238
5239
/**
 * @opcode 0x86
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register-register form: swap via two temporaries. */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* The locked helper is used by default (memory XCHG is implicitly
           atomic); the unlocked one only when IEM_F_X86_DISREGARD_LOCK is
           set in fExec. */
        IEM_MC_BEGIN(2, 4, 0, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);
        IEM_MC_LOCAL(uint8_t, uTmpReg);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP_U8_RW(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
        else
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Mem, bUnmapInfo);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5293
5294
/**
 * @opcode 0x87
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register-register form: swap via two temporaries. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* As for xchg Eb,Gb: the locked helper is the default (memory XCHG
           is implicitly atomic); the unlocked one is only used when
           IEM_F_X86_DISREGARD_LOCK is set in fExec. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 4, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_LOCAL(uint16_t, uTmpReg);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U16_RW(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 4, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_LOCAL(uint32_t, uTmpReg);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U32_RW(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 4, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_LOCAL(uint64_t, uTmpReg);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U64_RW(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5438
5439
/**
 * @opcode 0x88
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: plain 8-bit register-to-register copy. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5479
5480
/**
 * @opcode 0x89
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: register-to-register copy at the effective
           operand size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5577
5578
/**
 * @opcode 0x8a
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register source: plain 8-bit register-to-register copy. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5617
5618
/**
 * @opcode 0x8b
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register source: register-to-register copy at the effective
           operand size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5715
5716
5717/**
5718 * opcode 0x63
5719 * @todo Table fixme
5720 */
5721FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
5722{
5723 if (!IEM_IS_64BIT_CODE(pVCpu))
5724 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
5725 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
5726 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
5727 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
5728}
5729
5730
/**
 * @opcode 0x8c
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if (iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                /* _ZX_: the selector is zero-extended into the wider register. */
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5809
5810
5811
5812
/**
 * @opcode 0x8d
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */

    /* The effective address is stored, not the memory content; for 16/32-bit
       operand sizes it is truncated to the destination width. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5862
5863
/**
 * @opcode 0x8e
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    /** @todo r=bird: What does 8086 do here wrt CS? */
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     *
     * Note! Using IEMOP_MOV_SW_EV_REG_BODY here to specify different
     *       IEM_CIMPL_F_XXX values depending on the CPU mode and target
     *       register. This is a restriction of the current recompiler
     *       approach.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* NOTE(review): the RT_BIT_64 mask below presumably names the guest
           segment sel/base/limit registers the CIMPL may modify (consumed by
           the native recompiler) -- confirm against IEM_MC_CALL_CIMPL_2. */
#define IEMOP_MOV_SW_EV_REG_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 0, 0, a_fCImplFlags); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t, u16Value, 1); \
            IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                RT_BIT_64(kIemNativeGstReg_SegSelFirst + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + iSegReg), \
                                iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        if (iSegReg == X86_SREG_SS)
        {
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_REG_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_REG_BODY
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#define IEMOP_MOV_SW_EV_MEM_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 1, 0, a_fCImplFlags); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t, u16Value, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                RT_BIT_64(kIemNativeGstReg_SegSelFirst + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + iSegReg), \
                                iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        if (iSegReg == X86_SREG_SS)
        {
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_MEM_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_MEM_BODY
    }
}
5977
5978
/** Opcode 0x8f /0. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations. Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler. It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev? Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calculations. This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice. This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            /* The high byte of the 3rd argument (2 << 8) carries the stack
               pointer adjustment applied during the EA calc, matching the
               operand size popped - see the note above about Intel
               incrementing RSP before the effective address is formed. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else /* Disabled alternative: raw interpreter-only implementation kept for reference. */
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Only commit the updated RSP once the store has succeeded. */
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
6110
6111
6112/**
6113 * @opcode 0x8f
6114 */
6115FNIEMOP_DEF(iemOp_Grp1A__xop)
6116{
6117 /*
6118 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
6119 * three byte VEX prefix, except that the mmmmm field cannot have the values
6120 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
6121 */
6122 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6123 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
6124 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
6125
6126 IEMOP_MNEMONIC(xop, "xop");
6127 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
6128 {
6129 /** @todo Test when exctly the XOP conformance checks kick in during
6130 * instruction decoding and fetching (using \#PF). */
6131 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
6132 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6133 if ( ( pVCpu->iem.s.fPrefixes
6134 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
6135 == 0)
6136 {
6137 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
6138 if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
6139 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
6140 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
6141 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
6142 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
6143 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
6144 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
6145 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
6146
6147 /** @todo XOP: Just use new tables and decoders. */
6148 switch (bRm & 0x1f)
6149 {
6150 case 8: /* xop opcode map 8. */
6151 IEMOP_BITCH_ABOUT_STUB();
6152 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6153
6154 case 9: /* xop opcode map 9. */
6155 IEMOP_BITCH_ABOUT_STUB();
6156 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6157
6158 case 10: /* xop opcode map 10. */
6159 IEMOP_BITCH_ABOUT_STUB();
6160 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6161
6162 default:
6163 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
6164 IEMOP_RAISE_INVALID_OPCODE_RET();
6165 }
6166 }
6167 else
6168 Log(("XOP: Invalid prefix mix!\n"));
6169 }
6170 else
6171 Log(("XOP: XOP support disabled!\n"));
6172 IEMOP_RAISE_INVALID_OPCODE_RET();
6173}
6174
6175
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Swaps the given general register with rAX at the current effective operand
 * size.  No LOCK prefix is allowed (register-only form).
 *
 * @param   iReg    Low 3 bits of the register index; the REX.B extension bit
 *                  is OR'ed in below.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    iReg |= pVCpu->iem.s.uRexB; /* Apply the REX.B register extension bit. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6226
6227
6228/**
6229 * @opcode 0x90
6230 */
6231FNIEMOP_DEF(iemOp_nop)
6232{
6233 /* R8/R8D and RAX/EAX can be exchanged. */
6234 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
6235 {
6236 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
6237 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
6238 }
6239
6240 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
6241 {
6242 IEMOP_MNEMONIC(pause, "pause");
6243 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
6244 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
6245 if (!IEM_IS_IN_GUEST(pVCpu))
6246 { /* probable */ }
6247#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6248 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
6249 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmx_pause);
6250#endif
6251#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
6252 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
6253 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_svm_pause);
6254#endif
6255 }
6256 else
6257 IEMOP_MNEMONIC(nop, "nop");
6258 /** @todo testcase: lock nop; lock pause */
6259 IEM_MC_BEGIN(0, 0, 0, 0);
6260 IEMOP_HLP_DONE_DECODING();
6261 IEM_MC_ADVANCE_RIP_AND_FINISH();
6262 IEM_MC_END();
6263}
6264
6265
/**
 * @opcode 0x91
 *
 * Shares the common xchg reg,rAX body; REX.B is applied by the helper.
 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
6274
6275
/**
 * @opcode 0x92
 *
 * Shares the common xchg reg,rAX body; REX.B is applied by the helper.
 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
6284
6285
/**
 * @opcode 0x93
 *
 * Shares the common xchg reg,rAX body; REX.B is applied by the helper.
 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
6294
6295
6296/**
6297 * @opcode 0x94
6298 */
6299FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6300{
6301 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6302 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6303}
6304
6305
/**
 * @opcode 0x95
 *
 * Shares the common xchg reg,rAX body; REX.B is applied by the helper.
 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
6314
6315
/**
 * @opcode 0x96
 *
 * Shares the common xchg reg,rAX body; REX.B is applied by the helper.
 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
6324
6325
/**
 * @opcode 0x97
 *
 * Shares the common xchg reg,rAX body; REX.B is applied by the helper.
 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
6334
6335
/**
 * @opcode 0x98
 *
 * CBW / CWDE / CDQE - sign extend AL into AX, AX into EAX, or EAX into RAX,
 * selected by the effective operand size.  Implemented by testing the sign
 * bit of the source half and either OR-ing in or AND-ing out the high half.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* AX = sign-extend(AL): replicate AL bit 7 into AH. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* EAX = sign-extend(AX): replicate AX bit 15 into the high word. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* RAX = sign-extend(EAX): replicate EAX bit 31 into the high dword. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6385
6386
/**
 * @opcode 0x99
 *
 * CWD / CDQ / CQO - fill rDX with the sign bit of rAX at the effective
 * operand size (rDX:rAX becomes the sign extension of rAX).
 */
FNIEMOP_DEF(iemOp_cwd)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* DX = AX bit 15 replicated. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* EDX = EAX bit 31 replicated. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* RDX = RAX bit 63 replicated. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6436
6437
/**
 * @opcode 0x9a
 *
 * Direct far call with immediate seg:off operand; invalid in 64-bit mode
 * (IEMOP_HLP_NO_64BIT below).
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation.
       The offset comes first in the instruction stream, then the selector. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
    uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
                                iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
6459
6460
/** Opcode 0x9b. (aka fwait)
 *
 * Checks for pending x87 exceptions / device-not-available conditions and
 * otherwise does nothing.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6472
6473
/**
 * @opcode 0x9c
 *
 * Push EFLAGS; deferred entirely to the C implementation (modifies rSP).
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
                                iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
6485
6486
/**
 * @opcode 0x9d
 *
 * Pop EFLAGS; deferred to the C implementation.  Flags IRQ checking before
 * and after since popf may change IF.
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_MNEMONIC(popf_Fv, "popf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
                                iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
6499
6500
/**
 * @opcode 0x9e
 *
 * Load SF/ZF/AF/PF/CF from AH into the low byte of EFLAGS.  In 64-bit mode
 * this is only valid when the CPU reports LAHF/SAHF support.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    /* X86_GREG_xSP with the byte-register encoding maps to AH (no REX). */
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Keep only the five flags SAHF may set, force the reserved bit 1,
       and merge with the untouched upper 24 bits of EFLAGS. */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6524
6525
/**
 * @opcode 0x9f
 *
 * Store the low byte of EFLAGS into AH.  In 64-bit mode this is only valid
 * when the CPU reports LAHF/SAHF support.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    /* X86_GREG_xSP with the byte-register encoding maps to AH (no REX). */
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6543
6544
/**
 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
 * Will return/throw on failures.
 *
 * The width of the fetched offset follows the effective address mode
 * (16/32/64 bits) and is zero-extended to 64 bits.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } while (0)
6568
/**
 * @opcode 0xa0
 *
 * mov AL, moffs8 - load AL from an absolute (segment-relative) offset taken
 * straight from the instruction stream.
 */
FNIEMOP_DEF(iemOp_mov_AL_Ob)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6593
6594
/**
 * @opcode 0xa1
 *
 * mov rAX, moffs - load AX/EAX/RAX from an absolute (segment-relative)
 * offset, operand-size dependent.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6648
6649
/**
 * @opcode 0xa2
 *
 * mov moffs8, AL - store AL to an absolute (segment-relative) offset taken
 * straight from the instruction stream.
 */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
    IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6674
6675
/**
 * @opcode 0xa3
 *
 * mov moffs, rAX - store AX/EAX/RAX to an absolute (segment-relative)
 * offset, operand-size dependent.
 */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Store rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6729
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one MOVS iteration: load ValBits from effSeg:rSI, store to ES:rDI,
 * then advance or retreat both index registers by ValBits/8 depending on
 * EFLAGS.DF.  AddrBits selects the address-size (16/32/64). */
#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6749
/**
 * @opcode 0xa4
 *
 * movsb - byte string move; with a rep prefix the whole loop is deferred to
 * the C implementation, otherwise a single iteration is emitted inline.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6798
6799
/**
 * @opcode 0xa5
 *
 * movsw/movsd/movsq - word/dword/qword string move; with a rep prefix the
 * loop is deferred to the C implementation selected by operand and address
 * size, otherwise a single iteration is emitted via IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every inner case above returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* 64-bit op with 16-bit addressing cannot be encoded */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

#undef IEM_MOVS_CASE
6925
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one CMPS iteration: load ValBits from effSeg:rSI and ES:rDI, compare
 * them via the cmp assembly helper (updating EFLAGS), then advance or retreat
 * both index registers by ValBits/8 depending on EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 3, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    \
    IEM_MC_LOCAL(RTGCPTR, uAddr1); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr1, X86_GREG_xSI); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr1); \
    \
    IEM_MC_LOCAL(RTGCPTR, uAddr2); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr2, X86_GREG_xDI); \
    IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr2); \
    \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_ARG_LOCAL_REF(uint##ValBits##_t *, puValue1, uValue1, 0); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6955
6956/**
6957 * @opcode 0xa6
6958 */
6959FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
6960{
6961
6962 /*
6963 * Use the C implementation if a repeat prefix is encountered.
6964 */
6965 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6966 {
6967 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
6968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6969 switch (pVCpu->iem.s.enmEffAddrMode)
6970 {
6971 case IEMMODE_16BIT:
6972 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6973 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6974 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6975 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6976 iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6977 case IEMMODE_32BIT:
6978 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6979 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6980 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6981 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6982 iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6983 case IEMMODE_64BIT:
6984 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6985 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6986 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6987 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6988 iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6989 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6990 }
6991 }
6992 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6993 {
6994 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
6995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6996 switch (pVCpu->iem.s.enmEffAddrMode)
6997 {
6998 case IEMMODE_16BIT:
6999 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7000 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7001 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7002 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7003 iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
7004 case IEMMODE_32BIT:
7005 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7006 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7007 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7008 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7009 iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
7010 case IEMMODE_64BIT:
7011 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7012 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7013 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7014 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7015 iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
7016 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7017 }
7018 }
7019
7020 /*
7021 * Sharing case implementation with cmps[wdq] below.
7022 */
7023 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
7024 switch (pVCpu->iem.s.enmEffAddrMode)
7025 {
7026 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7027 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7028 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
7029 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7030 }
7031}
7032
7033
7034/**
7035 * @opcode 0xa7
7036 */
7037FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
7038{
7039 /*
7040 * Use the C implementation if a repeat prefix is encountered.
7041 */
7042 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7043 {
7044 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
7045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7046 switch (pVCpu->iem.s.enmEffOpSize)
7047 {
7048 case IEMMODE_16BIT:
7049 switch (pVCpu->iem.s.enmEffAddrMode)
7050 {
7051 case IEMMODE_16BIT:
7052 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7053 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7054 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7055 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7056 iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7057 case IEMMODE_32BIT:
7058 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7059 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7060 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7061 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7062 iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7063 case IEMMODE_64BIT:
7064 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7065 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7066 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7067 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7068 iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7069 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7070 }
7071 break;
7072 case IEMMODE_32BIT:
7073 switch (pVCpu->iem.s.enmEffAddrMode)
7074 {
7075 case IEMMODE_16BIT:
7076 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7077 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7078 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7079 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7080 iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7081 case IEMMODE_32BIT:
7082 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7083 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7084 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7085 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7086 iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7087 case IEMMODE_64BIT:
7088 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7089 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7090 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7091 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7092 iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7093 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7094 }
7095 case IEMMODE_64BIT:
7096 switch (pVCpu->iem.s.enmEffAddrMode)
7097 {
7098 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
7099 case IEMMODE_32BIT:
7100 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7101 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7102 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7103 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7104 iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7105 case IEMMODE_64BIT:
7106 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7107 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7108 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7109 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7110 iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7111 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7112 }
7113 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7114 }
7115 }
7116
7117 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7118 {
7119 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
7120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7121 switch (pVCpu->iem.s.enmEffOpSize)
7122 {
7123 case IEMMODE_16BIT:
7124 switch (pVCpu->iem.s.enmEffAddrMode)
7125 {
7126 case IEMMODE_16BIT:
7127 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7128 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7129 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7130 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7131 iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7132 case IEMMODE_32BIT:
7133 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7134 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7135 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7136 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7137 iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7138 case IEMMODE_64BIT:
7139 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7140 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7141 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7142 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7143 iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7144 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7145 }
7146 break;
7147 case IEMMODE_32BIT:
7148 switch (pVCpu->iem.s.enmEffAddrMode)
7149 {
7150 case IEMMODE_16BIT:
7151 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7152 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7153 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7154 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7155 iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7156 case IEMMODE_32BIT:
7157 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7158 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7159 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7160 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7161 iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7162 case IEMMODE_64BIT:
7163 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7164 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7165 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7166 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7167 iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7168 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7169 }
7170 case IEMMODE_64BIT:
7171 switch (pVCpu->iem.s.enmEffAddrMode)
7172 {
7173 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
7174 case IEMMODE_32BIT:
7175 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7176 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7177 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7178 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7179 iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7180 case IEMMODE_64BIT:
7181 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7182 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7183 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7184 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7185 iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7186 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7187 }
7188 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7189 }
7190 }
7191
7192 /*
7193 * Annoying double switch here.
7194 * Using ugly macro for implementing the cases, sharing it with cmpsb.
7195 */
7196 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
7197 switch (pVCpu->iem.s.enmEffOpSize)
7198 {
7199 case IEMMODE_16BIT:
7200 switch (pVCpu->iem.s.enmEffAddrMode)
7201 {
7202 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7203 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7204 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
7205 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7206 }
7207 break;
7208
7209 case IEMMODE_32BIT:
7210 switch (pVCpu->iem.s.enmEffAddrMode)
7211 {
7212 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7213 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7214 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
7215 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7216 }
7217 break;
7218
7219 case IEMMODE_64BIT:
7220 switch (pVCpu->iem.s.enmEffAddrMode)
7221 {
7222 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7223 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break;
7224 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
7225 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7226 }
7227 break;
7228 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7229 }
7230}
7231
7232#undef IEM_CMPS_CASE
7233
7234/**
7235 * @opcode 0xa8
7236 */
7237FNIEMOP_DEF(iemOp_test_AL_Ib)
7238{
7239 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
7240 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7241 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
7242}
7243
7244
7245/**
7246 * @opcode 0xa9
7247 */
7248FNIEMOP_DEF(iemOp_test_eAX_Iz)
7249{
7250 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
7251 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7252 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
7253}
7254
7255
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the body of one non-repeated STOS iteration: stores the low ValBits
 * of xAX to [ES:xDI], then steps xDI by the operand size - backwards when
 * EFLAGS.DF is set, forwards otherwise.
 *
 * @param ValBits    Operand width in bits (8/16/32/64).
 * @param AddrBits   Effective address width in bits (16/32/64).
 * @param a_fMcFlags IEM_MC_F_XXX flags for IEM_MC_BEGIN (mode restrictions).
 */
#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7272
7273/**
7274 * @opcode 0xaa
7275 */
7276FNIEMOP_DEF(iemOp_stosb_Yb_AL)
7277{
7278 /*
7279 * Use the C implementation if a repeat prefix is encountered.
7280 */
7281 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7282 {
7283 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
7284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7285 switch (pVCpu->iem.s.enmEffAddrMode)
7286 {
7287 case IEMMODE_16BIT:
7288 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7289 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7290 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7291 iemCImpl_stos_al_m16);
7292 case IEMMODE_32BIT:
7293 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7294 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7295 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7296 iemCImpl_stos_al_m32);
7297 case IEMMODE_64BIT:
7298 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7299 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7300 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7301 iemCImpl_stos_al_m64);
7302 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7303 }
7304 }
7305
7306 /*
7307 * Sharing case implementation with stos[wdq] below.
7308 */
7309 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
7310 switch (pVCpu->iem.s.enmEffAddrMode)
7311 {
7312 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7313 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7314 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
7315 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7316 }
7317}
7318
7319
7320/**
7321 * @opcode 0xab
7322 */
7323FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
7324{
7325 /*
7326 * Use the C implementation if a repeat prefix is encountered.
7327 */
7328 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7329 {
7330 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
7331 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7332 switch (pVCpu->iem.s.enmEffOpSize)
7333 {
7334 case IEMMODE_16BIT:
7335 switch (pVCpu->iem.s.enmEffAddrMode)
7336 {
7337 case IEMMODE_16BIT:
7338 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7339 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7340 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7341 iemCImpl_stos_ax_m16);
7342 case IEMMODE_32BIT:
7343 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7344 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7345 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7346 iemCImpl_stos_ax_m32);
7347 case IEMMODE_64BIT:
7348 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7349 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7350 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7351 iemCImpl_stos_ax_m64);
7352 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7353 }
7354 break;
7355 case IEMMODE_32BIT:
7356 switch (pVCpu->iem.s.enmEffAddrMode)
7357 {
7358 case IEMMODE_16BIT:
7359 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7360 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7361 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7362 iemCImpl_stos_eax_m16);
7363 case IEMMODE_32BIT:
7364 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7365 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7366 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7367 iemCImpl_stos_eax_m32);
7368 case IEMMODE_64BIT:
7369 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7370 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7371 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7372 iemCImpl_stos_eax_m64);
7373 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7374 }
7375 case IEMMODE_64BIT:
7376 switch (pVCpu->iem.s.enmEffAddrMode)
7377 {
7378 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7379 case IEMMODE_32BIT:
7380 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7381 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7382 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7383 iemCImpl_stos_rax_m32);
7384 case IEMMODE_64BIT:
7385 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7386 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7387 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7388 iemCImpl_stos_rax_m64);
7389 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7390 }
7391 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7392 }
7393 }
7394
7395 /*
7396 * Annoying double switch here.
7397 * Using ugly macro for implementing the cases, sharing it with stosb.
7398 */
7399 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7400 switch (pVCpu->iem.s.enmEffOpSize)
7401 {
7402 case IEMMODE_16BIT:
7403 switch (pVCpu->iem.s.enmEffAddrMode)
7404 {
7405 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7406 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7407 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
7408 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7409 }
7410 break;
7411
7412 case IEMMODE_32BIT:
7413 switch (pVCpu->iem.s.enmEffAddrMode)
7414 {
7415 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7416 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7417 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
7418 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7419 }
7420 break;
7421
7422 case IEMMODE_64BIT:
7423 switch (pVCpu->iem.s.enmEffAddrMode)
7424 {
7425 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7426 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
7427 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
7428 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7429 }
7430 break;
7431 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7432 }
7433}
7434
7435#undef IEM_STOS_CASE
7436
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the body of one non-repeated LODS iteration: loads ValBits from
 * [iEffSeg:xSI] into xAX, then steps xSI by the operand size - backwards
 * when EFLAGS.DF is set, forwards otherwise.
 *
 * @param ValBits    Operand width in bits (8/16/32/64).
 * @param AddrBits   Effective address width in bits (16/32/64).
 * @param a_fMcFlags IEM_MC_F_XXX flags for IEM_MC_BEGIN (mode restrictions).
 */
#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7453
7454/**
7455 * @opcode 0xac
7456 */
7457FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
7458{
7459 /*
7460 * Use the C implementation if a repeat prefix is encountered.
7461 */
7462 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7463 {
7464 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
7465 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7466 switch (pVCpu->iem.s.enmEffAddrMode)
7467 {
7468 case IEMMODE_16BIT:
7469 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7470 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7471 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7472 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7473 iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
7474 case IEMMODE_32BIT:
7475 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7476 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7477 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7478 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7479 iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
7480 case IEMMODE_64BIT:
7481 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7482 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7483 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7484 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7485 iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
7486 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7487 }
7488 }
7489
7490 /*
7491 * Sharing case implementation with stos[wdq] below.
7492 */
7493 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
7494 switch (pVCpu->iem.s.enmEffAddrMode)
7495 {
7496 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7497 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7498 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
7499 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7500 }
7501}
7502
7503
7504/**
7505 * @opcode 0xad
7506 */
7507FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
7508{
7509 /*
7510 * Use the C implementation if a repeat prefix is encountered.
7511 */
7512 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7513 {
7514 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
7515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7516 switch (pVCpu->iem.s.enmEffOpSize)
7517 {
7518 case IEMMODE_16BIT:
7519 switch (pVCpu->iem.s.enmEffAddrMode)
7520 {
7521 case IEMMODE_16BIT:
7522 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7523 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7524 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7525 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7526 iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
7527 case IEMMODE_32BIT:
7528 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7529 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7530 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7531 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7532 iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
7533 case IEMMODE_64BIT:
7534 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7535 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7536 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7537 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7538 iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
7539 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7540 }
7541 break;
7542 case IEMMODE_32BIT:
7543 switch (pVCpu->iem.s.enmEffAddrMode)
7544 {
7545 case IEMMODE_16BIT:
7546 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7547 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7548 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7549 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7550 iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
7551 case IEMMODE_32BIT:
7552 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7553 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7554 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7555 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7556 iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
7557 case IEMMODE_64BIT:
7558 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7559 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7560 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7561 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7562 iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
7563 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7564 }
7565 case IEMMODE_64BIT:
7566 switch (pVCpu->iem.s.enmEffAddrMode)
7567 {
7568 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
7569 case IEMMODE_32BIT:
7570 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7571 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7572 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7573 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7574 iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
7575 case IEMMODE_64BIT:
7576 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7577 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7578 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7579 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7580 iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
7581 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7582 }
7583 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7584 }
7585 }
7586
7587 /*
7588 * Annoying double switch here.
7589 * Using ugly macro for implementing the cases, sharing it with lodsb.
7590 */
7591 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
7592 switch (pVCpu->iem.s.enmEffOpSize)
7593 {
7594 case IEMMODE_16BIT:
7595 switch (pVCpu->iem.s.enmEffAddrMode)
7596 {
7597 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7598 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7599 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
7600 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7601 }
7602 break;
7603
7604 case IEMMODE_32BIT:
7605 switch (pVCpu->iem.s.enmEffAddrMode)
7606 {
7607 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7608 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7609 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
7610 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7611 }
7612 break;
7613
7614 case IEMMODE_64BIT:
7615 switch (pVCpu->iem.s.enmEffAddrMode)
7616 {
7617 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7618 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
7619 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
7620 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7621 }
7622 break;
7623 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7624 }
7625}
7626
7627#undef IEM_LODS_CASE
7628
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the body of one non-repeated SCAS iteration: compares xAX with the
 * value at [ES:xDI] via iemAImpl_cmp_uNN (EFLAGS only; xAX is referenced but
 * CMP does not store a result), then steps xDI by the operand size -
 * backwards when EFLAGS.DF is set, forwards otherwise.
 *
 * @param ValBits    Operand width in bits (8/16/32/64).
 * @param AddrBits   Effective address width in bits (16/32/64).
 * @param a_fMcFlags IEM_MC_F_XXX flags for IEM_MC_BEGIN (mode restrictions).
 */
#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
7651
7652/**
7653 * @opcode 0xae
7654 */
7655FNIEMOP_DEF(iemOp_scasb_AL_Xb)
7656{
7657 /*
7658 * Use the C implementation if a repeat prefix is encountered.
7659 */
7660 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7661 {
7662 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
7663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7664 switch (pVCpu->iem.s.enmEffAddrMode)
7665 {
7666 case IEMMODE_16BIT:
7667 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7668 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7669 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7670 iemCImpl_repe_scas_al_m16);
7671 case IEMMODE_32BIT:
7672 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7673 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7674 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7675 iemCImpl_repe_scas_al_m32);
7676 case IEMMODE_64BIT:
7677 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7678 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7679 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7680 iemCImpl_repe_scas_al_m64);
7681 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7682 }
7683 }
7684 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7685 {
7686 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
7687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7688 switch (pVCpu->iem.s.enmEffAddrMode)
7689 {
7690 case IEMMODE_16BIT:
7691 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7692 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7693 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7694 iemCImpl_repne_scas_al_m16);
7695 case IEMMODE_32BIT:
7696 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7697 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7698 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7699 iemCImpl_repne_scas_al_m32);
7700 case IEMMODE_64BIT:
7701 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7702 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7703 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7704 iemCImpl_repne_scas_al_m64);
7705 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7706 }
7707 }
7708
7709 /*
7710 * Sharing case implementation with stos[wdq] below.
7711 */
7712 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
7713 switch (pVCpu->iem.s.enmEffAddrMode)
7714 {
7715 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7716 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7717 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
7718 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7719 }
7720}
7721
7722
7723/**
7724 * @opcode 0xaf
 *
 * SCAS rAX,Xv: compares AX/EAX/RAX (per effective operand size) with the
 * memory operand addressed by xDI and advances xDI.  The REPE/REPNE
 * prefixed forms are deferred to C implementations; the plain form expands
 * the IEM_SCAS_CASE microcode macro shared with scasb above.
7725 */
7726FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
7727{
7728 /*
7729 * Use the C implementation if a repeat prefix is encountered.
7730 */
7731 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7732 {
7733 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
7734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* Note: every IEM_MC_DEFER_TO_CIMPL_0_RET below returns, so no case
 in the inner switches can fall through to the next one. */
7735 switch (pVCpu->iem.s.enmEffOpSize)
7736 {
7737 case IEMMODE_16BIT:
7738 switch (pVCpu->iem.s.enmEffAddrMode)
7739 {
7740 case IEMMODE_16BIT:
7741 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7742 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7743 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7744 iemCImpl_repe_scas_ax_m16);
7745 case IEMMODE_32BIT:
7746 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7747 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7748 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7749 iemCImpl_repe_scas_ax_m32);
7750 case IEMMODE_64BIT:
7751 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7752 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7753 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7754 iemCImpl_repe_scas_ax_m64);
7755 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7756 }
7757 break;
7758 case IEMMODE_32BIT:
7759 switch (pVCpu->iem.s.enmEffAddrMode)
7760 {
7761 case IEMMODE_16BIT:
7762 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7763 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7764 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7765 iemCImpl_repe_scas_eax_m16);
7766 case IEMMODE_32BIT:
7767 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7768 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7769 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7770 iemCImpl_repe_scas_eax_m32);
7771 case IEMMODE_64BIT:
7772 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7773 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7774 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7775 iemCImpl_repe_scas_eax_m64);
7776 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7777 }
 /* no break: unreachable, all cases above return */
7778 case IEMMODE_64BIT:
7779 switch (pVCpu->iem.s.enmEffAddrMode)
7780 {
7781 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Is this wrong?  We can do 16-bit addressing in 64-bit mode, but not 32-bit, right? */
7782 case IEMMODE_32BIT:
7783 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7784 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7785 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7786 iemCImpl_repe_scas_rax_m32);
7787 case IEMMODE_64BIT:
7788 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7789 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7790 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7791 iemCImpl_repe_scas_rax_m64);
7792 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7793 }
7794 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7795 }
7796 }
7797 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7798 {
7799 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
7800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7801 switch (pVCpu->iem.s.enmEffOpSize)
7802 {
7803 case IEMMODE_16BIT:
7804 switch (pVCpu->iem.s.enmEffAddrMode)
7805 {
7806 case IEMMODE_16BIT:
7807 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7808 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7809 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7810 iemCImpl_repne_scas_ax_m16);
7811 case IEMMODE_32BIT:
7812 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7813 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7814 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7815 iemCImpl_repne_scas_ax_m32);
7816 case IEMMODE_64BIT:
7817 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7818 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7819 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7820 iemCImpl_repne_scas_ax_m64);
7821 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7822 }
7823 break;
7824 case IEMMODE_32BIT:
7825 switch (pVCpu->iem.s.enmEffAddrMode)
7826 {
7827 case IEMMODE_16BIT:
7828 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7829 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7830 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7831 iemCImpl_repne_scas_eax_m16);
7832 case IEMMODE_32BIT:
7833 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7834 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7835 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7836 iemCImpl_repne_scas_eax_m32);
7837 case IEMMODE_64BIT:
7838 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7839 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7840 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7841 iemCImpl_repne_scas_eax_m64);
7842 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7843 }
 /* no break: unreachable, all cases above return */
7844 case IEMMODE_64BIT:
7845 switch (pVCpu->iem.s.enmEffAddrMode)
7846 {
7847 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
7848 case IEMMODE_32BIT:
7849 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7850 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7851 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7852 iemCImpl_repne_scas_rax_m32);
7853 case IEMMODE_64BIT:
7854 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7855 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7856 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7857 iemCImpl_repne_scas_rax_m64);
7858 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7859 }
7860 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7861 }
7862 }
7863
7864 /*
7865 * Annoying double switch here.
7866 * Using ugly macro for implementing the cases, sharing it with scasb.
7867 */
7868 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
7869 switch (pVCpu->iem.s.enmEffOpSize)
7870 {
7871 case IEMMODE_16BIT:
7872 switch (pVCpu->iem.s.enmEffAddrMode)
7873 {
7874 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7875 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7876 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
7877 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7878 }
7879 break;
7880
7881 case IEMMODE_32BIT:
7882 switch (pVCpu->iem.s.enmEffAddrMode)
7883 {
7884 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7885 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7886 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
7887 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7888 }
7889 break;
7890
7891 case IEMMODE_64BIT:
7892 switch (pVCpu->iem.s.enmEffAddrMode)
7893 {
7894 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7895 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
7896 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
7897 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7898 }
7899 break;
7900 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7901 }
7902}
7903
7904#undef IEM_SCAS_CASE
7905
7906/**
7907 * Common 'mov r8, imm8' helper.
 *
 * Decodes the 8-bit immediate and stores it into the byte register given by
 * @a iFixedReg.
 *
 * @param   iFixedReg   Register index (0..15), already combined with REX.B
 *                      by the caller.  NOTE(review): without a REX prefix,
 *                      indices 4..7 traditionally address AH/CH/DH/BH -- the
 *                      IEM_MC_STORE_GREG_U8_CONST accessor presumably handles
 *                      that mapping; confirm against its definition.
7908 */
7909FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
7910{
7911 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7912 IEM_MC_BEGIN(0, 0, 0, 0);
7913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7914 IEM_MC_STORE_GREG_U8_CONST(iFixedReg, u8Imm);
7915 IEM_MC_ADVANCE_RIP_AND_FINISH();
7916 IEM_MC_END();
7917}
7918
7919
7920/**
7921 * @opcode 0xb0
 *
 * 'mov AL,Ib' - loads an 8-bit immediate into AL (R8B when REX.B is set).
7922 */
7923FNIEMOP_DEF(iemOp_mov_AL_Ib)
7924{
7925 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
7926 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7927}
7928
7929
7930/**
7931 * @opcode 0xb1
 *
 * 'mov CL,Ib' - loads an 8-bit immediate into CL (R9B when REX.B is set).
7932 */
7933FNIEMOP_DEF(iemOp_CL_Ib)
7934{
7935 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
7936 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7937}
7938
7939
7940/**
7941 * @opcode 0xb2
 *
 * 'mov DL,Ib' - loads an 8-bit immediate into DL (R10B when REX.B is set).
7942 */
7943FNIEMOP_DEF(iemOp_DL_Ib)
7944{
7945 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
7946 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7947}
7948
7949
7950/**
7951 * @opcode 0xb3
 *
 * 'mov BL,Ib' - loads an 8-bit immediate into BL (R11B when REX.B is set).
7952 */
7953FNIEMOP_DEF(iemOp_BL_Ib)
7954{
7955 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
7956 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7957}
7958
7959
7960/**
7961 * @opcode 0xb4
 *
 * 'mov AH,Ib' - register index 4 (X86_GREG_xSP) selects AH when no REX
 * prefix is present, SPL (or R12B with REX.B) otherwise; the common worker
 * is assumed to resolve the high-byte encoding.
7962 */
7963FNIEMOP_DEF(iemOp_mov_AH_Ib)
7964{
7965 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
7966 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7967}
7968
7969
7970/**
7971 * @opcode 0xb5
 *
 * 'mov CH,Ib' - register index 5 (X86_GREG_xBP) selects CH when no REX
 * prefix is present, BPL (or R13B with REX.B) otherwise.
7972 */
7973FNIEMOP_DEF(iemOp_CH_Ib)
7974{
7975 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
7976 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7977}
7978
7979
7980/**
7981 * @opcode 0xb6
 *
 * 'mov DH,Ib' - register index 6 (X86_GREG_xSI) selects DH when no REX
 * prefix is present, SIL (or R14B with REX.B) otherwise.
7982 */
7983FNIEMOP_DEF(iemOp_DH_Ib)
7984{
7985 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
7986 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7987}
7988
7989
7990/**
7991 * @opcode 0xb7
 *
 * 'mov BH,Ib' - register index 7 (X86_GREG_xDI) selects BH when no REX
 * prefix is present, DIL (or R15B with REX.B) otherwise.
7992 */
7993FNIEMOP_DEF(iemOp_BH_Ib)
7994{
7995 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
7996 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7997}
7998
7999
8000/**
8001 * Common 'mov regX,immX' helper.
 *
 * Decodes an immediate of the effective operand size (a full 64-bit
 * immediate in 64-bit mode) and stores it into @a iFixedReg.
 *
 * @param   iFixedReg   Register index (0..15), already combined with REX.B
 *                      by the caller.
8002 */
8003FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
8004{
8005 switch (pVCpu->iem.s.enmEffOpSize)
8006 {
8007 case IEMMODE_16BIT:
8008 IEM_MC_BEGIN(0, 0, 0, 0);
8009 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8011 IEM_MC_STORE_GREG_U16_CONST(iFixedReg, u16Imm);
8012 IEM_MC_ADVANCE_RIP_AND_FINISH();
8013 IEM_MC_END();
8014 break;
8015
8016 case IEMMODE_32BIT:
8017 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8018 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8020 IEM_MC_STORE_GREG_U32_CONST(iFixedReg, u32Imm);
8021 IEM_MC_ADVANCE_RIP_AND_FINISH();
8022 IEM_MC_END();
8023 break;
8024
8025 case IEMMODE_64BIT:
8026 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
8027 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
8028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8029 IEM_MC_STORE_GREG_U64_CONST(iFixedReg, u64Imm);
8030 IEM_MC_ADVANCE_RIP_AND_FINISH();
8031 IEM_MC_END();
8032 break;
8033 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8034 }
8035}
8036
8037
8038/**
8039 * @opcode 0xb8
 *
 * 'mov rAX,Iv' - loads an operand-sized immediate into AX/EAX/RAX (R8 when
 * REX.B is set).
8040 */
8041FNIEMOP_DEF(iemOp_eAX_Iv)
8042{
8043 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
8044 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8045}
8046
8047
8048/**
8049 * @opcode 0xb9
 *
 * 'mov rCX,Iv' - loads an operand-sized immediate into CX/ECX/RCX (R9 when
 * REX.B is set).
8050 */
8051FNIEMOP_DEF(iemOp_eCX_Iv)
8052{
8053 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
8054 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8055}
8056
8057
8058/**
8059 * @opcode 0xba
 *
 * 'mov rDX,Iv' - loads an operand-sized immediate into DX/EDX/RDX (R10 when
 * REX.B is set).
8060 */
8061FNIEMOP_DEF(iemOp_eDX_Iv)
8062{
8063 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
8064 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8065}
8066
8067
8068/**
8069 * @opcode 0xbb
 *
 * 'mov rBX,Iv' - loads an operand-sized immediate into BX/EBX/RBX (R11 when
 * REX.B is set).
8070 */
8071FNIEMOP_DEF(iemOp_eBX_Iv)
8072{
8073 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
8074 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8075}
8076
8077
8078/**
8079 * @opcode 0xbc
 *
 * 'mov rSP,Iv' - loads an operand-sized immediate into SP/ESP/RSP (R12 when
 * REX.B is set).
8080 */
8081FNIEMOP_DEF(iemOp_eSP_Iv)
8082{
8083 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
8084 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8085}
8086
8087
8088/**
8089 * @opcode 0xbd
 *
 * 'mov rBP,Iv' - loads an operand-sized immediate into BP/EBP/RBP (R13 when
 * REX.B is set).
8090 */
8091FNIEMOP_DEF(iemOp_eBP_Iv)
8092{
8093 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
8094 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8095}
8096
8097
8098/**
8099 * @opcode 0xbe
 *
 * 'mov rSI,Iv' - loads an operand-sized immediate into SI/ESI/RSI (R14 when
 * REX.B is set).
8100 */
8101FNIEMOP_DEF(iemOp_eSI_Iv)
8102{
8103 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
8104 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8105}
8106
8107
8108/**
8109 * @opcode 0xbf
 *
 * 'mov rDI,Iv' - loads an operand-sized immediate into DI/EDI/RDI (R15 when
 * REX.B is set).
8110 */
8111FNIEMOP_DEF(iemOp_eDI_Iv)
8112{
8113 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
8114 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8115}
8116
8117
8118/**
8119 * @opcode 0xc0
 *
 * Group 2 byte shifts/rotates with an immediate count: rol/ror/rcl/rcr/
 * shl/shr/sar Eb,Ib selected via ModRM.reg; /6 is an invalid encoding.
 * Requires a 186 or later.
8120 */
8121FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
8122{
8123 IEMOP_HLP_MIN_186();
8124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8125 PCIEMOPSHIFTSIZES pImpl;
8126 switch (IEM_GET_MODRM_REG_8(bRm))
8127 {
8128 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
8129 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
8130 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
8131 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
8132 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
8133 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
8134 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
8135 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8136 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8137 }
8138 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8139
8140 if (IEM_IS_MODRM_REG_MODE(bRm))
8141 {
8142 /* register */
8143 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8144 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0);
8145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8146 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8147 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8148 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8149 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8150 IEM_MC_REF_EFLAGS(pEFlags);
8151 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8152 IEM_MC_ADVANCE_RIP_AND_FINISH();
8153 IEM_MC_END();
8154 }
8155 else
8156 {
8157 /* memory */
 /* Note: the effective address is calculated before the immediate
 byte is fetched, hence the trailing cbImmAndRspOffset=1 argument. */
8158 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_186, 0);
8159 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8161
8162 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8164
8165 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8166 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8167 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8168
8169 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8170 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8171 IEM_MC_FETCH_EFLAGS(EFlags);
8172 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8173
8174 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
8175 IEM_MC_COMMIT_EFLAGS(EFlags);
8176 IEM_MC_ADVANCE_RIP_AND_FINISH();
8177 IEM_MC_END();
8178 }
8179}
8180
8181
8182/**
8183 * @opcode 0xc1
 *
 * Group 2 word/dword/qword shifts/rotates with an immediate count:
 * rol/ror/rcl/rcr/shl/shr/sar Ev,Ib selected via ModRM.reg; /6 is an
 * invalid encoding.  Requires a 186 or later.
8184 */
8185FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
8186{
8187 IEMOP_HLP_MIN_186();
8188 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8189 PCIEMOPSHIFTSIZES pImpl;
8190 switch (IEM_GET_MODRM_REG_8(bRm))
8191 {
8192 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
8193 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
8194 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
8195 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
8196 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
8197 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
8198 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
8199 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8200 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8201 }
8202 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8203
8204 if (IEM_IS_MODRM_REG_MODE(bRm))
8205 {
8206 /* register */
8207 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8208 switch (pVCpu->iem.s.enmEffOpSize)
8209 {
8210 case IEMMODE_16BIT:
8211 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0);
8212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8213 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8214 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8215 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8216 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8217 IEM_MC_REF_EFLAGS(pEFlags);
8218 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8219 IEM_MC_ADVANCE_RIP_AND_FINISH();
8220 IEM_MC_END();
8221 break;
8222
8223 case IEMMODE_32BIT:
8224 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
8225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8226 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8227 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8228 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8229 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8230 IEM_MC_REF_EFLAGS(pEFlags);
8231 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
 /* 32-bit register writes zero the upper dword in 64-bit mode. */
8232 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
8233 IEM_MC_ADVANCE_RIP_AND_FINISH();
8234 IEM_MC_END();
8235 break;
8236
8237 case IEMMODE_64BIT:
8238 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
8239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8240 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8241 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8242 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8243 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8244 IEM_MC_REF_EFLAGS(pEFlags);
8245 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8246 IEM_MC_ADVANCE_RIP_AND_FINISH();
8247 IEM_MC_END();
8248 break;
8249
8250 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8251 }
8252 }
8253 else
8254 {
8255 /* memory */
8256 switch (pVCpu->iem.s.enmEffOpSize)
8257 {
8258 case IEMMODE_16BIT:
8259 IEM_MC_BEGIN(3, 3, 0, 0);
8260 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8261 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8262
8263 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8265
8266 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8267 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8268 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8269
8270 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8271 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8272 IEM_MC_FETCH_EFLAGS(EFlags);
8273 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8274
8275 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
8276 IEM_MC_COMMIT_EFLAGS(EFlags);
8277 IEM_MC_ADVANCE_RIP_AND_FINISH();
8278 IEM_MC_END();
8279 break;
8280
8281 case IEMMODE_32BIT:
8282 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
8283 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8284 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8285
8286 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8288
8289 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8290 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8291 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8292
8293 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8294 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8295 IEM_MC_FETCH_EFLAGS(EFlags);
8296 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8297
8298 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
8299 IEM_MC_COMMIT_EFLAGS(EFlags);
8300 IEM_MC_ADVANCE_RIP_AND_FINISH();
8301 IEM_MC_END();
8302 break;
8303
8304 case IEMMODE_64BIT:
8305 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
8306 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8307 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8308
8309 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8311
8312 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8313 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8314 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8315
8316 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8317 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8318 IEM_MC_FETCH_EFLAGS(EFlags);
8319 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8320
8321 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
8322 IEM_MC_COMMIT_EFLAGS(EFlags);
8323 IEM_MC_ADVANCE_RIP_AND_FINISH();
8324 IEM_MC_END();
8325 break;
8326
8327 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8328 }
8329 }
8330}
8331
8332
8333/**
8334 * @opcode 0xc2
8335 */
8336FNIEMOP_DEF(iemOp_retn_Iw)
8337{
8338 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
8339 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8340 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8341 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8342 switch (pVCpu->iem.s.enmEffOpSize)
8343 {
8344 case IEMMODE_16BIT:
8345 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_iw_16, u16Imm);
8346 case IEMMODE_32BIT:
8347 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_iw_32, u16Imm);
8348 case IEMMODE_64BIT:
8349 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_iw_64, u16Imm);
8350 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8351 }
8352}
8353
8354
8355/**
8356 * @opcode 0xc3
8357 */
8358FNIEMOP_DEF(iemOp_retn)
8359{
8360 IEMOP_MNEMONIC(retn, "retn");
8361 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8362 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8363 switch (pVCpu->iem.s.enmEffOpSize)
8364 {
8365 case IEMMODE_16BIT:
8366 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_16);
8367 case IEMMODE_32BIT:
8368 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_32);
8369 case IEMMODE_64BIT:
8370 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_64);
8371 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8372 }
8373}
8374
8375
8376/**
8377 * @opcode 0xc4
 *
 * Dual-role opcode: 'les Gv,Mp' in legacy/compatibility mode with a memory
 * operand, otherwise the 3-byte VEX prefix.
8378 */
8379FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
8380{
8381 /* The LES instruction is invalid in 64-bit mode. In legacy and
8382 compatibility mode it is invalid with MOD=3.
8383 The use as a VEX prefix is made possible by assigning the inverted
8384 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
8385 outside of 64-bit mode. VEX is not available in real or v86 mode. */
8386 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8387 if ( IEM_IS_64BIT_CODE(pVCpu)
8388 || IEM_IS_MODRM_REG_MODE(bRm) )
8389 {
8390 IEMOP_MNEMONIC(vex3_prefix, "vex3");
8391 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
8392 {
8393 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
8394 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
8395 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
8396 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
8397 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
 /* VEX.W is only honoured in 64-bit code. */
8398 if ((bVex2 & 0x80 /* VEX.W */) && IEM_IS_64BIT_CODE(pVCpu))
8399 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
 /* The R/X/B payload bits are stored inverted in the VEX prefix. */
8400 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
8401 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
8402 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
8403 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
8404 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
8405 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
8406
 /* VEX.mmmmm selects the opcode map. */
8407 switch (bRm & 0x1f)
8408 {
8409 case 1: /* 0x0f lead opcode byte. */
8410#ifdef IEM_WITH_VEX
8411 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8412#else
8413 IEMOP_BITCH_ABOUT_STUB();
8414 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8415#endif
8416
8417 case 2: /* 0x0f 0x38 lead opcode bytes. */
8418#ifdef IEM_WITH_VEX
8419 return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8420#else
8421 IEMOP_BITCH_ABOUT_STUB();
8422 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8423#endif
8424
8425 case 3: /* 0x0f 0x3a lead opcode bytes. */
8426#ifdef IEM_WITH_VEX
8427 return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8428#else
8429 IEMOP_BITCH_ABOUT_STUB();
8430 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8431#endif
8432
8433 default:
8434 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
8435 IEMOP_RAISE_INVALID_OPCODE_RET();
8436 }
8437 }
8438 Log(("VEX3: VEX support disabled!\n"));
8439 IEMOP_RAISE_INVALID_OPCODE_RET();
8440 }
8441
8442 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
8443 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
8444}
8445
8446
8447/**
8448 * @opcode 0xc5
 *
 * Dual-role opcode: 'lds Gv,Mp' in legacy/compatibility mode with a memory
 * operand, otherwise the 2-byte VEX prefix.
8449 */
8450FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
8451{
8452 /* The LDS instruction is invalid in 64-bit mode. In legacy and
8453 compatibility mode it is invalid with MOD=3.
8454 The use as a VEX prefix is made possible by assigning the inverted
8455 REX.R to the top MOD bit, and the top bit in the inverted register
8456 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
8457 to accessing registers 0..7 in this VEX form. */
8458 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8459 if ( IEM_IS_64BIT_CODE(pVCpu)
8460 || IEM_IS_MODRM_REG_MODE(bRm))
8461 {
8462 IEMOP_MNEMONIC(vex2_prefix, "vex2");
8463 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
8464 {
8465 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
8466 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
8467 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
8468 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
 /* The R and vvvv payload bits are stored inverted in the VEX prefix;
 the 2-byte form always implies the 0x0f opcode map. */
8469 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
8470 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
8471 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
8472 pVCpu->iem.s.idxPrefix = bRm & 0x3;
8473
8474#ifdef IEM_WITH_VEX
8475 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8476#else
8477 IEMOP_BITCH_ABOUT_STUB();
8478 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8479#endif
8480 }
8481
8482 /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
8483 Log(("VEX2: VEX support disabled!\n"));
8484 IEMOP_RAISE_INVALID_OPCODE_RET();
8485 }
8486
8487 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
8488 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
8489}
8490
8491
8492/**
8493 * @opcode 0xc6
 *
 * Group 11: 'mov Eb,Ib' (/0); all other ModRM.reg encodings raise \#UD.
8494 */
8495FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
8496{
8497 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8498 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
8499 IEMOP_RAISE_INVALID_OPCODE_RET();
8500 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
8501
8502 if (IEM_IS_MODRM_REG_MODE(bRm))
8503 {
8504 /* register access */
8505 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8506 IEM_MC_BEGIN(0, 0, 0, 0);
8507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8508 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
8509 IEM_MC_ADVANCE_RIP_AND_FINISH();
8510 IEM_MC_END();
8511 }
8512 else
8513 {
8514 /* memory access. */
 /* The effective address is calculated before the trailing immediate
 byte is fetched, hence the cbImmAndRspOffset=1 argument. */
8515 IEM_MC_BEGIN(0, 1, 0, 0);
8516 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8518 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8520 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
8521 IEM_MC_ADVANCE_RIP_AND_FINISH();
8522 IEM_MC_END();
8523 }
8524}
8525
8526
8527/**
8528 * @opcode 0xc7
 *
 * Group 11: 'mov Ev,Iz' (/0); all other ModRM.reg encodings raise \#UD.
 * In 64-bit mode the 32-bit immediate is sign-extended to 64 bits.
8529 */
8530FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
8531{
8532 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8533 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
8534 IEMOP_RAISE_INVALID_OPCODE_RET();
8535 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
8536
8537 if (IEM_IS_MODRM_REG_MODE(bRm))
8538 {
8539 /* register access */
8540 switch (pVCpu->iem.s.enmEffOpSize)
8541 {
8542 case IEMMODE_16BIT:
8543 IEM_MC_BEGIN(0, 0, 0, 0);
8544 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8546 IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
8547 IEM_MC_ADVANCE_RIP_AND_FINISH();
8548 IEM_MC_END();
8549 break;
8550
8551 case IEMMODE_32BIT:
8552 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8553 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8555 IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
8556 IEM_MC_ADVANCE_RIP_AND_FINISH();
8557 IEM_MC_END();
8558 break;
8559
8560 case IEMMODE_64BIT:
8561 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
 /* Iz is 32 bits, sign-extended to 64. */
8562 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8564 IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
8565 IEM_MC_ADVANCE_RIP_AND_FINISH();
8566 IEM_MC_END();
8567 break;
8568
8569 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8570 }
8571 }
8572 else
8573 {
8574 /* memory access. */
 /* The effective address is calculated before the trailing immediate
 is fetched; cbImmAndRspOffset gives its size (2 or 4 bytes). */
8575 switch (pVCpu->iem.s.enmEffOpSize)
8576 {
8577 case IEMMODE_16BIT:
8578 IEM_MC_BEGIN(0, 1, 0, 0);
8579 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8580 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
8581 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8583 IEM_MC_STORE_MEM_U16_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
8584 IEM_MC_ADVANCE_RIP_AND_FINISH();
8585 IEM_MC_END();
8586 break;
8587
8588 case IEMMODE_32BIT:
8589 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8590 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8591 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8592 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8594 IEM_MC_STORE_MEM_U32_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
8595 IEM_MC_ADVANCE_RIP_AND_FINISH();
8596 IEM_MC_END();
8597 break;
8598
8599 case IEMMODE_64BIT:
8600 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
8601 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8602 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8603 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8605 IEM_MC_STORE_MEM_U64_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
8606 IEM_MC_ADVANCE_RIP_AND_FINISH();
8607 IEM_MC_END();
8608 break;
8609
8610 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8611 }
8612 }
8613}
8614
8615
8616
8617
8618/**
8619 * @opcode 0xc8
 *
 * 'enter Iw,Ib' - create a stack frame of Iw bytes with Ib nesting levels.
 * Requires a 186 or later; deferred to a C implementation, which modifies
 * xSP and xBP (see the register annotation below).
8620 */
8621FNIEMOP_DEF(iemOp_enter_Iw_Ib)
8622{
8623 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
8624 IEMOP_HLP_MIN_186();
8625 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8626 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
8627 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
8628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8629 IEM_MC_DEFER_TO_CIMPL_3_RET(0,
8630 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8631 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
8632 iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
8633}
8634
8635
8636/**
8637 * @opcode 0xc9
 *
 * 'leave' - tear down the current stack frame (xSP = xBP; pop xBP).
 * Requires a 186 or later; deferred to a C implementation, which modifies
 * xSP and xBP (see the register annotation below).
8638 */
8639FNIEMOP_DEF(iemOp_leave)
8640{
8641 IEMOP_MNEMONIC(leave, "leave");
8642 IEMOP_HLP_MIN_186();
8643 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8645 IEM_MC_DEFER_TO_CIMPL_1_RET(0,
8646 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8647 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
8648 iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
8649}
8650
8651
8652/**
8653 * @opcode 0xca
 *
 * 'retf Iw' - far return, popping Iw extra bytes.  Deferred to a C
 * implementation.  The register annotation lists all data segment
 * selectors/bases/limits since a privilege-level change may invalidate
 * DS/ES/FS/GS.
8654 */
8655FNIEMOP_DEF(iemOp_retf_Iw)
8656{
8657 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
8658 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8660 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
8661 | IEM_CIMPL_F_MODE,
8662 RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
8663 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
8664 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8665 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
8666 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
8667 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
8668 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8669 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
8670 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
8671 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
8672 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8673 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS),
8674 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
8675}
8676
8677
8678/**
8679 * @opcode 0xcb
 *
 * 'retf' - plain far return; same C implementation as 0xca with zero
 * extra bytes to pop.  See iemOp_retf_Iw regarding the segment register
 * annotation.
8680 */
8681FNIEMOP_DEF(iemOp_retf)
8682{
8683 IEMOP_MNEMONIC(retf, "retf");
8684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8685 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
8686 | IEM_CIMPL_F_MODE,
8687 RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
8688 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
8689 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8690 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
8691 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
8692 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
8693 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8694 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
8695 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
8696 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
8697 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8698 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS),
8699 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
8700}
8701
8702
8703/**
8704 * @opcode 0xcc
8705 */
8706FNIEMOP_DEF(iemOp_int3)
8707{
8708 IEMOP_MNEMONIC(int3, "int3");
8709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8710 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8711 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
8712 iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
8713}
8714
8715
8716/**
8717 * @opcode 0xcd
8718 */
8719FNIEMOP_DEF(iemOp_int_Ib)
8720{
8721 IEMOP_MNEMONIC(int_Ib, "int Ib");
8722 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
8723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8724 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8725 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS, UINT64_MAX,
8726 iemCImpl_int, u8Int, IEMINT_INTN);
8727 /** @todo make task-switches, ring-switches, ++ return non-zero status */
8728}
8729
8730
8731/**
8732 * @opcode 0xce
8733 */
8734FNIEMOP_DEF(iemOp_into)
8735{
8736 IEMOP_MNEMONIC(into, "into");
8737 IEMOP_HLP_NO_64BIT();
8738 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8739 | IEM_CIMPL_F_BRANCH_CONDITIONAL | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
8740 UINT64_MAX,
8741 iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
8742 /** @todo make task-switches, ring-switches, ++ return non-zero status */
8743}
8744
8745
8746/**
8747 * @opcode 0xcf
8748 */
8749FNIEMOP_DEF(iemOp_iret)
8750{
8751 IEMOP_MNEMONIC(iret, "iret");
8752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8753 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8754 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
8755 RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
8756 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
8757 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
8758 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
8759 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
8760 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
8761 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8762 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8763 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8764 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
8765 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
8766 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS),
8767 iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
8768 /* Segment registers are sanitized when returning to an outer ring, or fully
8769 reloaded when returning to v86 mode. Thus the large flush list above. */
8770}
8771
8772
8773/**
8774 * @opcode 0xd0
8775 */
8776FNIEMOP_DEF(iemOp_Grp2_Eb_1)
8777{
8778 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8779 PCIEMOPSHIFTSIZES pImpl;
8780 switch (IEM_GET_MODRM_REG_8(bRm))
8781 {
8782 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
8783 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
8784 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
8785 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
8786 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
8787 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
8788 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
8789 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8790 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
8791 }
8792 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8793
8794 if (IEM_IS_MODRM_REG_MODE(bRm))
8795 {
8796 /* register */
8797 IEM_MC_BEGIN(3, 0, 0, 0);
8798 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8799 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8800 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
8801 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8802 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8803 IEM_MC_REF_EFLAGS(pEFlags);
8804 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8805 IEM_MC_ADVANCE_RIP_AND_FINISH();
8806 IEM_MC_END();
8807 }
8808 else
8809 {
8810 /* memory */
8811 IEM_MC_BEGIN(3, 3, 0, 0);
8812 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8813 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
8814 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8815 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8816 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8817
8818 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8820 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8821 IEM_MC_FETCH_EFLAGS(EFlags);
8822 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8823
8824 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
8825 IEM_MC_COMMIT_EFLAGS(EFlags);
8826 IEM_MC_ADVANCE_RIP_AND_FINISH();
8827 IEM_MC_END();
8828 }
8829}
8830
8831
8832
8833/**
8834 * @opcode 0xd1
8835 */
8836FNIEMOP_DEF(iemOp_Grp2_Ev_1)
8837{
8838 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8839 PCIEMOPSHIFTSIZES pImpl;
8840 switch (IEM_GET_MODRM_REG_8(bRm))
8841 {
8842 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
8843 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
8844 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
8845 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
8846 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
8847 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
8848 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
8849 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8850 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
8851 }
8852 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8853
8854 if (IEM_IS_MODRM_REG_MODE(bRm))
8855 {
8856 /* register */
8857 switch (pVCpu->iem.s.enmEffOpSize)
8858 {
8859 case IEMMODE_16BIT:
8860 IEM_MC_BEGIN(3, 0, 0, 0);
8861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8862 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8863 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8864 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8865 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8866 IEM_MC_REF_EFLAGS(pEFlags);
8867 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8868 IEM_MC_ADVANCE_RIP_AND_FINISH();
8869 IEM_MC_END();
8870 break;
8871
8872 case IEMMODE_32BIT:
8873 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
8874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8875 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8876 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8877 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8878 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8879 IEM_MC_REF_EFLAGS(pEFlags);
8880 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8881 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
8882 IEM_MC_ADVANCE_RIP_AND_FINISH();
8883 IEM_MC_END();
8884 break;
8885
8886 case IEMMODE_64BIT:
8887 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
8888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8889 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8890 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8891 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8892 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8893 IEM_MC_REF_EFLAGS(pEFlags);
8894 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8895 IEM_MC_ADVANCE_RIP_AND_FINISH();
8896 IEM_MC_END();
8897 break;
8898
8899 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8900 }
8901 }
8902 else
8903 {
8904 /* memory */
8905 switch (pVCpu->iem.s.enmEffOpSize)
8906 {
8907 case IEMMODE_16BIT:
8908 IEM_MC_BEGIN(3, 3, 0, 0);
8909 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8910 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8911 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8912 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8913 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8914
8915 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8917 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8918 IEM_MC_FETCH_EFLAGS(EFlags);
8919 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8920
8921 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
8922 IEM_MC_COMMIT_EFLAGS(EFlags);
8923 IEM_MC_ADVANCE_RIP_AND_FINISH();
8924 IEM_MC_END();
8925 break;
8926
8927 case IEMMODE_32BIT:
8928 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
8929 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8930 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8931 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8932 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8933 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8934
8935 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8937 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8938 IEM_MC_FETCH_EFLAGS(EFlags);
8939 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8940
8941 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
8942 IEM_MC_COMMIT_EFLAGS(EFlags);
8943 IEM_MC_ADVANCE_RIP_AND_FINISH();
8944 IEM_MC_END();
8945 break;
8946
8947 case IEMMODE_64BIT:
8948 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
8949 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8950 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8951 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8952 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8953 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8954
8955 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8957 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8958 IEM_MC_FETCH_EFLAGS(EFlags);
8959 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8960
8961 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
8962 IEM_MC_COMMIT_EFLAGS(EFlags);
8963 IEM_MC_ADVANCE_RIP_AND_FINISH();
8964 IEM_MC_END();
8965 break;
8966
8967 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8968 }
8969 }
8970}
8971
8972
8973/**
8974 * @opcode 0xd2
8975 */
8976FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
8977{
8978 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8979 PCIEMOPSHIFTSIZES pImpl;
8980 switch (IEM_GET_MODRM_REG_8(bRm))
8981 {
8982 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
8983 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
8984 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
8985 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
8986 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
8987 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
8988 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
8989 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8990 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
8991 }
8992 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8993
8994 if (IEM_IS_MODRM_REG_MODE(bRm))
8995 {
8996 /* register */
8997 IEM_MC_BEGIN(3, 0, 0, 0);
8998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8999 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9000 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9001 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9002 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9003 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9004 IEM_MC_REF_EFLAGS(pEFlags);
9005 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
9006 IEM_MC_ADVANCE_RIP_AND_FINISH();
9007 IEM_MC_END();
9008 }
9009 else
9010 {
9011 /* memory */
9012 IEM_MC_BEGIN(3, 3, 0, 0);
9013 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9014 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9015 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9016 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9017 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9018
9019 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9021 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9022 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9023 IEM_MC_FETCH_EFLAGS(EFlags);
9024 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
9025
9026 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
9027 IEM_MC_COMMIT_EFLAGS(EFlags);
9028 IEM_MC_ADVANCE_RIP_AND_FINISH();
9029 IEM_MC_END();
9030 }
9031}
9032
9033
9034/**
9035 * @opcode 0xd3
9036 */
9037FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
9038{
9039 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9040 PCIEMOPSHIFTSIZES pImpl;
9041 switch (IEM_GET_MODRM_REG_8(bRm))
9042 {
9043 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
9044 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
9045 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
9046 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
9047 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
9048 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
9049 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
9050 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9051 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
9052 }
9053 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
9054
9055 if (IEM_IS_MODRM_REG_MODE(bRm))
9056 {
9057 /* register */
9058 switch (pVCpu->iem.s.enmEffOpSize)
9059 {
9060 case IEMMODE_16BIT:
9061 IEM_MC_BEGIN(3, 0, 0, 0);
9062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9063 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9064 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9065 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9066 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9067 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9068 IEM_MC_REF_EFLAGS(pEFlags);
9069 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
9070 IEM_MC_ADVANCE_RIP_AND_FINISH();
9071 IEM_MC_END();
9072 break;
9073
9074 case IEMMODE_32BIT:
9075 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
9076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9077 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9078 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9079 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9080 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9081 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9082 IEM_MC_REF_EFLAGS(pEFlags);
9083 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
9084 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
9085 IEM_MC_ADVANCE_RIP_AND_FINISH();
9086 IEM_MC_END();
9087 break;
9088
9089 case IEMMODE_64BIT:
9090 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
9091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9092 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9093 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9094 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9095 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9096 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9097 IEM_MC_REF_EFLAGS(pEFlags);
9098 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
9099 IEM_MC_ADVANCE_RIP_AND_FINISH();
9100 IEM_MC_END();
9101 break;
9102
9103 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9104 }
9105 }
9106 else
9107 {
9108 /* memory */
9109 switch (pVCpu->iem.s.enmEffOpSize)
9110 {
9111 case IEMMODE_16BIT:
9112 IEM_MC_BEGIN(3, 3, 0, 0);
9113 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9114 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9115 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9116 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9117 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9118
9119 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9121 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9122 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9123 IEM_MC_FETCH_EFLAGS(EFlags);
9124 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
9125
9126 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
9127 IEM_MC_COMMIT_EFLAGS(EFlags);
9128 IEM_MC_ADVANCE_RIP_AND_FINISH();
9129 IEM_MC_END();
9130 break;
9131
9132 case IEMMODE_32BIT:
9133 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
9134 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9135 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9136 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9137 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9138 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9139
9140 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9142 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9143 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9144 IEM_MC_FETCH_EFLAGS(EFlags);
9145 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
9146
9147 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
9148 IEM_MC_COMMIT_EFLAGS(EFlags);
9149 IEM_MC_ADVANCE_RIP_AND_FINISH();
9150 IEM_MC_END();
9151 break;
9152
9153 case IEMMODE_64BIT:
9154 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
9155 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9156 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9157 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9159 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9160
9161 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9163 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9164 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9165 IEM_MC_FETCH_EFLAGS(EFlags);
9166 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
9167
9168 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
9169 IEM_MC_COMMIT_EFLAGS(EFlags);
9170 IEM_MC_ADVANCE_RIP_AND_FINISH();
9171 IEM_MC_END();
9172 break;
9173
9174 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9175 }
9176 }
9177}
9178
9179/**
9180 * @opcode 0xd4
9181 */
9182FNIEMOP_DEF(iemOp_aam_Ib)
9183{
9184 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
9185 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
9186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9187 IEMOP_HLP_NO_64BIT();
9188 if (!bImm)
9189 IEMOP_RAISE_DIVIDE_ERROR_RET();
9190 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aam, bImm);
9191}
9192
9193
9194/**
9195 * @opcode 0xd5
9196 */
9197FNIEMOP_DEF(iemOp_aad_Ib)
9198{
9199 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
9200 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
9201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9202 IEMOP_HLP_NO_64BIT();
9203 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aad, bImm);
9204}
9205
9206
9207/**
9208 * @opcode 0xd6
9209 */
9210FNIEMOP_DEF(iemOp_salc)
9211{
9212 IEMOP_MNEMONIC(salc, "salc");
9213 IEMOP_HLP_NO_64BIT();
9214
9215 IEM_MC_BEGIN(0, 0, 0, 0);
9216 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9217 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
9218 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
9219 } IEM_MC_ELSE() {
9220 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
9221 } IEM_MC_ENDIF();
9222 IEM_MC_ADVANCE_RIP_AND_FINISH();
9223 IEM_MC_END();
9224}
9225
9226
9227/**
9228 * @opcode 0xd7
9229 */
9230FNIEMOP_DEF(iemOp_xlat)
9231{
9232 IEMOP_MNEMONIC(xlat, "xlat");
9233 switch (pVCpu->iem.s.enmEffAddrMode)
9234 {
9235 case IEMMODE_16BIT:
9236 IEM_MC_BEGIN(2, 0, 0, 0);
9237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9238 IEM_MC_LOCAL(uint8_t, u8Tmp);
9239 IEM_MC_LOCAL(uint16_t, u16Addr);
9240 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
9241 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
9242 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
9243 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9244 IEM_MC_ADVANCE_RIP_AND_FINISH();
9245 IEM_MC_END();
9246 break;
9247
9248 case IEMMODE_32BIT:
9249 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
9250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9251 IEM_MC_LOCAL(uint8_t, u8Tmp);
9252 IEM_MC_LOCAL(uint32_t, u32Addr);
9253 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
9254 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
9255 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
9256 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9257 IEM_MC_ADVANCE_RIP_AND_FINISH();
9258 IEM_MC_END();
9259 break;
9260
9261 case IEMMODE_64BIT:
9262 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
9263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9264 IEM_MC_LOCAL(uint8_t, u8Tmp);
9265 IEM_MC_LOCAL(uint64_t, u64Addr);
9266 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
9267 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
9268 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
9269 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9270 IEM_MC_ADVANCE_RIP_AND_FINISH();
9271 IEM_MC_END();
9272 break;
9273
9274 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9275 }
9276}
9277
9278
9279/**
9280 * Common worker for FPU instructions working on ST0 and STn, and storing the
9281 * result in ST0.
9282 *
9283 * @param bRm Mod R/M byte.
9284 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9285 */
9286FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
9287{
9288 IEM_MC_BEGIN(3, 1, 0, 0);
9289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9290 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9291 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9292 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9293 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9294
9295 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9296 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9297 IEM_MC_PREPARE_FPU_USAGE();
9298 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9299 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
9300 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9301 } IEM_MC_ELSE() {
9302 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9303 } IEM_MC_ENDIF();
9304 IEM_MC_ADVANCE_RIP_AND_FINISH();
9305
9306 IEM_MC_END();
9307}
9308
9309
9310/**
9311 * Common worker for FPU instructions working on ST0 and STn, and only affecting
9312 * flags.
9313 *
9314 * @param bRm Mod R/M byte.
9315 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9316 */
9317FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
9318{
9319 IEM_MC_BEGIN(3, 1, 0, 0);
9320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9321 IEM_MC_LOCAL(uint16_t, u16Fsw);
9322 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9323 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9324 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9325
9326 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9327 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9328 IEM_MC_PREPARE_FPU_USAGE();
9329 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9330 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
9331 IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
9332 } IEM_MC_ELSE() {
9333 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
9334 } IEM_MC_ENDIF();
9335 IEM_MC_ADVANCE_RIP_AND_FINISH();
9336
9337 IEM_MC_END();
9338}
9339
9340
9341/**
9342 * Common worker for FPU instructions working on ST0 and STn, only affecting
9343 * flags, and popping when done.
9344 *
9345 * @param bRm Mod R/M byte.
9346 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9347 */
9348FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
9349{
9350 IEM_MC_BEGIN(3, 1, 0, 0);
9351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9352 IEM_MC_LOCAL(uint16_t, u16Fsw);
9353 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9354 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9355 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9356
9357 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9358 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9359 IEM_MC_PREPARE_FPU_USAGE();
9360 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9361 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
9362 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
9363 } IEM_MC_ELSE() {
9364 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
9365 } IEM_MC_ENDIF();
9366 IEM_MC_ADVANCE_RIP_AND_FINISH();
9367
9368 IEM_MC_END();
9369}
9370
9371
/** Opcode 0xd8 11/0.  ST(0) = ST(0) + ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
9378
9379
/** Opcode 0xd8 11/1.  ST(0) = ST(0) * ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
9386
9387
/** Opcode 0xd8 11/2.  Compares ST(0) with ST(i); only FSW is updated. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
9394
9395
/** Opcode 0xd8 11/3.  Like fcom st0,stN but pops the stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
9402
9403
/** Opcode 0xd8 11/4.  ST(0) = ST(0) - ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
9410
9411
/** Opcode 0xd8 11/5.  Reversed subtract: ST(0) = ST(i) - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
9418
9419
/** Opcode 0xd8 11/6.  ST(0) = ST(0) / ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
9426
9427
/** Opcode 0xd8 11/7.  Reversed divide: ST(0) = ST(i) / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
9434
9435
9436/**
9437 * Common worker for FPU instructions working on ST0 and an m32r, and storing
9438 * the result in ST0.
9439 *
9440 * @param bRm Mod R/M byte.
9441 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9442 */
9443FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
9444{
9445 IEM_MC_BEGIN(3, 3, 0, 0);
9446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9447 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9448 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
9449 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9450 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9451 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
9452
9453 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9455
9456 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9457 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9458 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9459
9460 IEM_MC_PREPARE_FPU_USAGE();
9461 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
9462 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
9463 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9464 } IEM_MC_ELSE() {
9465 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9466 } IEM_MC_ENDIF();
9467 IEM_MC_ADVANCE_RIP_AND_FINISH();
9468
9469 IEM_MC_END();
9470}
9471
9472
/** Opcode 0xd8 !11/0.  ST(0) = ST(0) + m32 float. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
9479
9480
/** Opcode 0xd8 !11/1.  ST(0) = ST(0) * m32 float. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
9487
9488
/** Opcode 0xd8 !11/2.  Compares ST(0) with an m32 float; only FSW is
 *  updated.  Open-coded rather than using a common worker since the
 *  FSW/underflow updates need the memory operand (seg:GCPtrEffSrc). */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9520
9521
/** Opcode 0xd8 !11/3.  Like fcom st0,m32r but pops the stack afterwards
 *  (..._THEN_POP variants of the FSW/underflow updates). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9553
9554
/** Opcode 0xd8 !11/4.  ST(0) = ST(0) - m32 float. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
9561
9562
9563/** Opcode 0xd8 !11/5. */
9564FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
9565{
9566 IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
9567 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
9568}
9569
9570
9571/** Opcode 0xd8 !11/6. */
9572FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
9573{
9574 IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
9575 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
9576}
9577
9578
9579/** Opcode 0xd8 !11/7. */
9580FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
9581{
9582 IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
9583 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
9584}
9585
9586
/**
 * @opcode 0xd8
 *
 * First x87 escape byte: dispatches on the ModR/M reg field. Register mode
 * selects the ST0,ST(i) forms; memory mode selects the ST0,m32real forms.
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) from the ModR/M byte and the low 3 bits of
       the 0xd8 escape byte, for FSTENV/FSAVE style state saving. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register operand forms: ST0 op ST(i). */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory operand forms: ST0 op m32real. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9626
9627
/** Opcode 0xd9 /0 mem32real
 * FLD m32real: load a 32-bit real from memory, convert to 80-bit, and push it
 * onto the FPU register stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target is ST(7) relative to the current top; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Register already in use: stack overflow handling. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9658
9659
/** Opcode 0xd9 !11/2 mem32real
 * FST m32real: store ST0 to memory as a 32-bit real (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on the FSW the conversion produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: if the invalid-operation exception is masked, write the
           negative QNaN indefinite value; either way flag stack underflow. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9693
9694
/** Opcode 0xd9 !11/3
 * FSTP m32real: store ST0 to memory as a 32-bit real, then pop the stack.
 * Identical to iemOp_fst_m32r except for the ..._THEN_POP status updates. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: masked #IA stores the negative QNaN indefinite; then
           underflow handling, still popping. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9728
9729
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte: load the FPU environment; the heavy lifting (14 vs 28
 * byte layout depending on operand size) is done by the C implementation. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    /* Operand size selects the 14-byte (16-bit) or 28-byte (32/64-bit) format. */
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
9747
9748
9749/** Opcode 0xd9 !11/5 */
9750FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
9751{
9752 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
9753 IEM_MC_BEGIN(1, 1, 0, 0);
9754 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9755 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9756
9757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9758 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9759 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9760
9761 IEM_MC_ARG(uint16_t, u16Fsw, 0);
9762 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9763
9764 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, 0, iemCImpl_fldcw, u16Fsw);
9765 IEM_MC_END();
9766}
9767
9768
/** Opcode 0xd9 !11/6
 * FNSTENV m14/28byte: store the FPU environment without checking for pending
 * exceptions; layout handled by the C implementation. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

    /* Operand size selects the 14-byte (16-bit) or 28-byte (32/64-bit) format. */
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
9786
9787
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte: store the current FPU control word to memory; no pending
 * exception check (the "no-wait" form). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9804
9805
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP: no arithmetic effect, but still raises \#NM/\#MF as appropriate and
 * updates the FPU opcode/instruction pointer state. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     * intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9821
9822
/** Opcode 0xd9 11/0 stN
 * FLD ST(i): push a copy of ST(i) onto the register stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        /* Wrap the source register value as a result and push it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Source register empty: push-underflow handling. */
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9847
9848
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i): exchange the contents of ST0 and ST(i). */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        /* Swap: ST(i)'s value goes into the result (stored to ST0 below) while
           ST0's value is written to ST(i). NOTE(review): the result carries
           X86_FSW_C1 here — verify against the intended C1 behavior of FXCH. */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* One or both registers empty: the C helper handles underflow. */
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9877
9878
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i): copy ST0 to ST(i), then pop the register stack. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        /* Destination is ST0 itself: no copy needed, just pop (or underflow). */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* General case: store ST0's value into ST(i) and pop. */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9925
9926
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * Raises \#NM/\#MF as needed; on an empty ST0 it goes down the stack-underflow
 * path instead of calling the arithmetic implementation.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        /* Result replaces ST0 (register 0 relative to top). */
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9954
9955
/** Opcode 0xd9 0xe0.
 * FCHS: change the sign of ST0 (unary worker). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1.
 * FABS: take the absolute value of ST0 (unary worker). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
9970
9971
/** Opcode 0xd9 0xe4.
 * FTST: compare ST0 against zero, updating only the FSW condition codes. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no register to mark free on underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9995
9996
/** Opcode 0xd9 0xe5.
 * FXAM: classify the value in ST0 into the FSW condition codes. Note that
 * unlike most ST0 operations this references the register unconditionally —
 * no empty check — since the classification covers empty registers too. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10017
10018
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * The push target ST(7) must be empty; otherwise the push-overflow path is
 * taken instead of calling the load-constant implementation.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEM_MC_BEGIN(1, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10044
10045
/** Opcode 0xd9 0xe8.
 * FLD1: push the constant +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9.
 * FLDL2T: push the constant log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea.
 * FLDL2E: push the constant log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb.
 * FLDPI: push the constant pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec.
 * FLDLG2: push the constant log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed.
 * FLDLN2: push the constant ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee.
 * FLDZ: push the constant +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
10098
10099
/** Opcode 0xd9 0xf0.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
10113
10114
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * Used for the two-operand-with-pop forms such as FYL2X, FPATAN and FYL2XP1
 * where ST(i) receives the result and ST0 is popped.
 *
 * @param bRm Mod R/M byte.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* First operand is ST(i), second is ST0; result lands in ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10145
10146
/** Opcode 0xd9 0xf1.
 * FYL2X: ST1 receives the result, ST0 is popped (stN_st0_pop worker, i=1). */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
10153
10154
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * Used for FPTAN, FXTRACT and FSINCOS. Underflow/overflow of the push is
 * handled via the ..._PUSH_UNDERFLOW_TWO path.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10182
10183
/** Opcode 0xd9 0xf2.
 * FPTAN: replaces ST0 and pushes a second result (replace+push worker). */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3.
 * FPATAN: ST1 receives the result, ST0 is popped (stN_st0_pop worker, i=1). */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4.
 * FXTRACT: replaces ST0 and pushes a second result (replace+push worker). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5.
 * FPREM1: ST0 op ST1, result in ST0 (st0_stN worker, i=1). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
10214
10215
/** Opcode 0xd9 0xf6.
 * FDECSTP: decrement the FPU stack-top pointer (TOP); register contents and
 * tags are untouched. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10236
10237
/** Opcode 0xd9 0xf7.
 * FINCSTP: increment the FPU stack-top pointer (TOP); register contents and
 * tags are untouched. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10258
10259
/** Opcode 0xd9 0xf8.
 * FPREM: ST0 op ST1, result in ST0 (st0_stN worker, i=1). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}


/** Opcode 0xd9 0xf9.
 * FYL2XP1: ST1 receives the result, ST0 is popped (stN_st0_pop worker, i=1). */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}


/** Opcode 0xd9 0xfa.
 * FSQRT: unary operation replacing ST0. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}


/** Opcode 0xd9 0xfb.
 * FSINCOS: replaces ST0 and pushes a second result (replace+push worker). */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}


/** Opcode 0xd9 0xfc.
 * FRNDINT: unary operation replacing ST0. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}


/** Opcode 0xd9 0xfd.
 * FSCALE: ST0 op ST1, result in ST0 (st0_stN worker, i=1). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}


/** Opcode 0xd9 0xfe.
 * FSIN: unary operation replacing ST0. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}


/** Opcode 0xd9 0xff.
 * FCOS: unary operation replacing ST0. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
10322
10323
/** Used by iemOp_EscF1.
 * Dispatch table for the 0xd9 register-mode encodings 0xe0 thru 0xff; the
 * iemOp_Invalid entries are undefined encodings. Indexed by (opcode - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
10360
10361
/**
 * @opcode 0xd9
 *
 * Second x87 escape byte: register mode covers FLD/FXCH/FNOP/FSTP plus the
 * 0xe0-0xff table above; memory mode covers m32real load/store and the
 * environment/control-word instructions.
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) from the ModR/M byte and the low 3 bits of
       the 0xd9 escape byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                /* Only 0xd0 is defined in this group (FNOP). */
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* 0xe0-0xff: table-driven dispatch. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10406
10407
/** Opcode 0xda 11/0.
 * FCMOVB ST0,ST(i): copy ST(i) to ST0 when CF is set. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST0 must be non-empty, else stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10432
10433
/** Opcode 0xda 11/1.
 * FCMOVE ST0,ST(i): copy ST(i) to ST0 when ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST0 must be non-empty, else stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10458
10459
/** Opcode 0xda 11/2.
 * FCMOVBE ST0,ST(i): copy ST(i) to ST0 when CF or ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST0 must be non-empty, else stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10484
10485
/** Opcode 0xda 11/3.
 * FCMOVU ST0,ST(i): copy ST(i) to ST0 when PF is set (the "unordered"
 * condition). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST0 must be non-empty, else stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10510
10511
/**
 * Common worker for FPU instructions working on ST0 and ST1, only affecting
 * flags, and popping twice when done.
 *
 * Used by FUCOMPP (and similar compare-and-double-pop forms): the comparison
 * result goes into FSW and both operands are popped off the stack.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Either operand empty: underflow, still popping both. */
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10541
10542
10543/** Opcode 0xda 0xe9: FUCOMPP - unordered compare ST(0) with ST(1), set FSW
 * condition codes, then pop the stack twice. */
10544FNIEMOP_DEF(iemOp_fucompp)
10545{
10546    IEMOP_MNEMONIC(fucompp, "fucompp");
10547    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
10548}
10549
10550
10551/**
10552 * Common worker for FPU instructions working on ST0 and an m32i, and storing
10553 * the result in ST0.
10554 *
10555 * Fetches the 32-bit signed integer operand from memory, applies the AIMPL
10556 * to (ST(0), m32i) and writes the result back to ST(0).
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
10557 */
10558FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
10559{
10560    IEM_MC_BEGIN(3, 3, 0, 0);
10561    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10562    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10563    IEM_MC_LOCAL(int32_t, i32Val2);
10564    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10565    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10566    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
10567
    /* Effective address must be decoded before finishing instruction decode. */
10568    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10569    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10570
10571    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10572    IEM_MC_MAYBE_RAISE_FPU_XCPT();
10573    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10574
10575    IEM_MC_PREPARE_FPU_USAGE();
10576    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10577        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
10578        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10579    } IEM_MC_ELSE() {
10580        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10581    } IEM_MC_ENDIF();
10582    IEM_MC_ADVANCE_RIP_AND_FINISH();
10583
10584    IEM_MC_END();
10585}
10586
10587
10588/** Opcode 0xda !11/0: FIADD m32i - ST(0) += (signed 32-bit integer). */
10589FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
10590{
10591    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
10592    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
10593}
10594
10595
10596/** Opcode 0xda !11/1: FIMUL m32i - ST(0) *= (signed 32-bit integer). */
10597FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
10598{
10599    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
10600    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
10601}
10602
10603
10604/** Opcode 0xda !11/2: FICOM st0,m32i - compare ST(0) against a signed 32-bit
 * integer in memory, updating only the FSW condition codes (no result store,
 * no pop). */
10605FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
10606{
10607    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");
10608
10609    IEM_MC_BEGIN(3, 3, 0, 0);
10610    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10611    IEM_MC_LOCAL(uint16_t, u16Fsw);
10612    IEM_MC_LOCAL(int32_t, i32Val2);
10613    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10614    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10615    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
10616
10617    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10618    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10619
10620    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10621    IEM_MC_MAYBE_RAISE_FPU_XCPT();
10622    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10623
10624    IEM_MC_PREPARE_FPU_USAGE();
10625    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10626        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
10627        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10628    } IEM_MC_ELSE() {
        /* UINT8_MAX: no destination register to flag for the underflow. */
10629        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10630    } IEM_MC_ENDIF();
10631    IEM_MC_ADVANCE_RIP_AND_FINISH();
10632
10633    IEM_MC_END();
10634}
10635
10636
10637/** Opcode 0xda !11/3: FICOMP st0,m32i - same compare as FICOM (shares
 * iemAImpl_ficom_r80_by_i32) but pops ST(0) afterwards. */
10638FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
10639{
10640    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");
10641
10642    IEM_MC_BEGIN(3, 3, 0, 0);
10643    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10644    IEM_MC_LOCAL(uint16_t, u16Fsw);
10645    IEM_MC_LOCAL(int32_t, i32Val2);
10646    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10647    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10648    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
10649
10650    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10651    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10652
10653    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10654    IEM_MC_MAYBE_RAISE_FPU_XCPT();
10655    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10656
10657    IEM_MC_PREPARE_FPU_USAGE();
10658    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10659        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* _THEN_POP variants: the stack pop happens on both the success and
           the underflow paths, as on real hardware. */
10660        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10661    } IEM_MC_ELSE() {
10662        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10663    } IEM_MC_ENDIF();
10664    IEM_MC_ADVANCE_RIP_AND_FINISH();
10665
10666    IEM_MC_END();
10667}
10668
10669
10670/** Opcode 0xda !11/4: FISUB m32i - ST(0) -= (signed 32-bit integer). */
10671FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
10672{
10673    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
10674    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
10675}
10676
10677
10678/** Opcode 0xda !11/5: FISUBR m32i - ST(0) = (signed 32-bit integer) - ST(0). */
10679FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
10680{
10681    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
10682    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
10683}
10684
10685
10686/** Opcode 0xda !11/6: FIDIV m32i - ST(0) /= (signed 32-bit integer). */
10687FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
10688{
10689    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
10690    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
10691}
10692
10693
10694/** Opcode 0xda !11/7: FIDIVR m32i - ST(0) = (signed 32-bit integer) / ST(0). */
10695FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
10696{
10697    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
10698    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
10699}
10700
10701
10702/**
10703 * @opcode 0xda
 *
 * x87 escape group DA dispatcher.  Register form (mod=11) carries the FCMOV
 * family plus FUCOMPP at 0xe9; memory form carries the m32i integer
 * arithmetic/compare instructions, selected by the reg field.
10704 */
10705FNIEMOP_DEF(iemOp_EscF2)
10706{
10707    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FOP value (low 3 opcode bits + modrm) for FSW/FOP updates. */
10708    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
10709    if (IEM_IS_MODRM_REG_MODE(bRm))
10710    {
10711        switch (IEM_GET_MODRM_REG_8(bRm))
10712        {
10713            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
10714            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
10715            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
10716            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
10717            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
10718            case 5:
                /* Only DA E9 (FUCOMPP) is defined in this row. */
10719                if (bRm == 0xe9)
10720                    return FNIEMOP_CALL(iemOp_fucompp);
10721                IEMOP_RAISE_INVALID_OPCODE_RET();
10722            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10723            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
10724            IEM_NOT_REACHED_DEFAULT_CASE_RET();
10725        }
10726    }
10727    else
10728    {
10729        switch (IEM_GET_MODRM_REG_8(bRm))
10730        {
10731            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
10732            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
10733            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
10734            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
10735            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
10736            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
10737            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
10738            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
10739            IEM_NOT_REACHED_DEFAULT_CASE_RET();
10740        }
10741    }
10742}
10743
10744
10745/** Opcode 0xdb !11/0: FILD m32i - convert a signed 32-bit integer from memory
 * to 80-bit real and push it onto the FPU stack. */
10746FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
10747{
10748    IEMOP_MNEMONIC(fild_m32i, "fild m32i");
10749
10750    IEM_MC_BEGIN(2, 3, 0, 0);
10751    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10752    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10753    IEM_MC_LOCAL(int32_t, i32Val);
10754    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10755    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);
10756
10757    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10758    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10759
10760    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10761    IEM_MC_MAYBE_RAISE_FPU_XCPT();
10762    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10763
10764    IEM_MC_PREPARE_FPU_USAGE();
    /* The register that becomes ST(0) after the push - currently ST(7) -
       must be free, otherwise it's a stack (push) overflow. */
10765    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
10766        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
10767        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10768    } IEM_MC_ELSE() {
10769        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10770    } IEM_MC_ENDIF();
10771    IEM_MC_ADVANCE_RIP_AND_FINISH();
10772
10773    IEM_MC_END();
10774}
10775
10776
10777/** Opcode 0xdb !11/1: FISTTP m32i (SSE3) - store ST(0) to memory as a signed
 * 32-bit integer using truncation (chop) rounding, then pop. */
10778FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
10779{
10780    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
10781    IEM_MC_BEGIN(3, 2, 0, 0);
10782    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10783    IEM_MC_LOCAL(uint16_t, u16Fsw);
10784    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10785    IEM_MC_ARG(int32_t *, pi32Dst, 1);
10786    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
10787
10788    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10789    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10790    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10791    IEM_MC_MAYBE_RAISE_FPU_XCPT();
10792
    /* Map the destination up front so memory faults are raised before any
       FPU state is changed. */
10793    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
10794    IEM_MC_PREPARE_FPU_USAGE();
10795    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
10796        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
10797        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
10798        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
10799    } IEM_MC_ELSE() {
        /* Empty ST(0): if invalid-op is masked (FCW.IM), store the integer
           indefinite value; either way signal underflow and pop. */
10800        IEM_MC_IF_FCW_IM() {
10801            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
10802            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
10803        } IEM_MC_ENDIF();
10804        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
10805    } IEM_MC_ENDIF();
10806    IEM_MC_ADVANCE_RIP_AND_FINISH();
10807
10808    IEM_MC_END();
10809}
10810
10811
10812/** Opcode 0xdb !11/2: FIST m32i - store ST(0) to memory as a signed 32-bit
 * integer using the current FCW rounding mode; no pop. */
10813FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
10814{
10815    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
10816    IEM_MC_BEGIN(3, 2, 0, 0);
10817    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10818    IEM_MC_LOCAL(uint16_t, u16Fsw);
10819    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10820    IEM_MC_ARG(int32_t *, pi32Dst, 1);
10821    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
10822
10823    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10824    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10825    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10826    IEM_MC_MAYBE_RAISE_FPU_XCPT();
10827
    /* Map the destination before touching FPU state so #PF etc. come first. */
10828    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
10829    IEM_MC_PREPARE_FPU_USAGE();
10830    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
10831        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
10832        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
10833        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
10834    } IEM_MC_ELSE() {
        /* Empty ST(0): store integer indefinite if FCW.IM, flag underflow. */
10835        IEM_MC_IF_FCW_IM() {
10836            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
10837            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
10838        } IEM_MC_ENDIF();
10839        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
10840    } IEM_MC_ENDIF();
10841    IEM_MC_ADVANCE_RIP_AND_FINISH();
10842
10843    IEM_MC_END();
10844}
10845
10846
10847/** Opcode 0xdb !11/3: FISTP m32i - same as FIST m32i (shares
 * iemAImpl_fist_r80_to_i32) but pops ST(0) afterwards. */
10848FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
10849{
10850    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
10851    IEM_MC_BEGIN(3, 2, 0, 0);
10852    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10853    IEM_MC_LOCAL(uint16_t, u16Fsw);
10854    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10855    IEM_MC_ARG(int32_t *, pi32Dst, 1);
10856    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
10857
10858    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10859    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10860    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10861    IEM_MC_MAYBE_RAISE_FPU_XCPT();
10862
10863    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
10864    IEM_MC_PREPARE_FPU_USAGE();
10865    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
10866        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
10867        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
10868        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
10869    } IEM_MC_ELSE() {
        /* Empty ST(0): store integer indefinite if FCW.IM; underflow + pop. */
10870        IEM_MC_IF_FCW_IM() {
10871            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
10872            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
10873        } IEM_MC_ENDIF();
10874        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
10875    } IEM_MC_ENDIF();
10876    IEM_MC_ADVANCE_RIP_AND_FINISH();
10877
10878    IEM_MC_END();
10879}
10880
10881
10882/** Opcode 0xdb !11/5: FLD m80r - load an 80-bit real from memory and push it
 * onto the FPU stack. */
10883FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
10884{
10885    IEMOP_MNEMONIC(fld_m80r, "fld m80r");
10886
10887    IEM_MC_BEGIN(2, 3, 0, 0);
10888    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10889    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10890    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
10891    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10892    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);
10893
10894    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10895    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10896
10897    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10898    IEM_MC_MAYBE_RAISE_FPU_XCPT();
10899    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10900
10901    IEM_MC_PREPARE_FPU_USAGE();
    /* The register that becomes ST(0) after the push must be free. */
10902    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
10903        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
10904        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10905    } IEM_MC_ELSE() {
10906        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10907    } IEM_MC_ENDIF();
10908    IEM_MC_ADVANCE_RIP_AND_FINISH();
10909
10910    IEM_MC_END();
10911}
10912
10913
10914/** Opcode 0xdb !11/7: FSTP m80r - store ST(0) to memory as an 80-bit real and
 * pop the stack. */
10915FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
10916{
10917    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
10918    IEM_MC_BEGIN(3, 2, 0, 0);
10919    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10920    IEM_MC_LOCAL(uint16_t, u16Fsw);
10921    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10922    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
10923    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
10924
10925    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10926    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10927    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10928    IEM_MC_MAYBE_RAISE_FPU_XCPT();
10929
    /* Explicit mapping variant: the 10-byte operand needs the non-power-of-two
       7-byte alignment-check mask, hence IEM_MC_MEM_MAP_EX. */
10930    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
10931    IEM_MC_PREPARE_FPU_USAGE();
10932    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
10933        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
10934        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
10935        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
10936    } IEM_MC_ELSE() {
        /* Empty ST(0): store negative QNaN (real indefinite) if FCW.IM. */
10937        IEM_MC_IF_FCW_IM() {
10938            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
10939            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
10940        } IEM_MC_ENDIF();
10941        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
10942    } IEM_MC_ENDIF();
10943    IEM_MC_ADVANCE_RIP_AND_FINISH();
10944
10945    IEM_MC_END();
10946}
10947
10948
10949/** Opcode 0xdb 11/0: FCMOVNB st0,stN - copies ST(i) into ST(0) when
 * EFLAGS.CF is clear (not-below). */
10950FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
10951{
10952    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
10953    IEM_MC_BEGIN(0, 1, 0, 0);
10954    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10955    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
10956
10957    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10958    IEM_MC_MAYBE_RAISE_FPU_XCPT();
10959
10960    IEM_MC_PREPARE_FPU_USAGE();
    /* Both registers must be occupied or it's a stack underflow. */
10961    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
10962        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
10963            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
10964        } IEM_MC_ENDIF();
10965        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
10966    } IEM_MC_ELSE() {
10967        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10968    } IEM_MC_ENDIF();
10969    IEM_MC_ADVANCE_RIP_AND_FINISH();
10970
10971    IEM_MC_END();
10972}
10973
10974
10975/** Opcode 0xdb 11/1: FCMOVNE st0,stN - copies ST(i) into ST(0) when
 * EFLAGS.ZF is clear (not-equal). */
10976FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
10977{
10978    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
10979    IEM_MC_BEGIN(0, 1, 0, 0);
10980    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10981    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
10982
10983    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10984    IEM_MC_MAYBE_RAISE_FPU_XCPT();
10985
10986    IEM_MC_PREPARE_FPU_USAGE();
    /* Both registers must be occupied or it's a stack underflow. */
10987    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
10988        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10989            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
10990        } IEM_MC_ENDIF();
10991        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
10992    } IEM_MC_ELSE() {
10993        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10994    } IEM_MC_ENDIF();
10995    IEM_MC_ADVANCE_RIP_AND_FINISH();
10996
10997    IEM_MC_END();
10998}
10999
11000
11001/** Opcode 0xdb 11/2: FCMOVNBE st0,stN - copies ST(i) into ST(0) when both
 * EFLAGS.CF and EFLAGS.ZF are clear (not-below-or-equal). */
11002FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
11003{
11004    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
11005    IEM_MC_BEGIN(0, 1, 0, 0);
11006    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11007    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
11008
11009    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11010    IEM_MC_MAYBE_RAISE_FPU_XCPT();
11011
11012    IEM_MC_PREPARE_FPU_USAGE();
    /* Both registers must be occupied or it's a stack underflow. */
11013    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
11014        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
11015            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
11016        } IEM_MC_ENDIF();
11017        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
11018    } IEM_MC_ELSE() {
11019        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11020    } IEM_MC_ENDIF();
11021    IEM_MC_ADVANCE_RIP_AND_FINISH();
11022
11023    IEM_MC_END();
11024}
11025
11026
11027/** Opcode 0xdb 11/3: FCMOVNU st0,stN - copies ST(i) into ST(0) when
 * EFLAGS.PF is clear (not-unordered). */
11028FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
11029{
11030    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
11031    IEM_MC_BEGIN(0, 1, 0, 0);
11032    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11033    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
11034
11035    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11036    IEM_MC_MAYBE_RAISE_FPU_XCPT();
11037
11038    IEM_MC_PREPARE_FPU_USAGE();
    /* Both registers must be occupied or it's a stack underflow. */
11039    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
11040        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
11041            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
11042        } IEM_MC_ENDIF();
11043        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
11044    } IEM_MC_ELSE() {
11045        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11046    } IEM_MC_ENDIF();
11047    IEM_MC_ADVANCE_RIP_AND_FINISH();
11048
11049    IEM_MC_END();
11050}
11051
11052
11053/** Opcode 0xdb 0xe0: FNENI - 8087-only interrupt-enable; treated as a NOP
 * (beyond the #NM check) on later FPUs, per the "(8087/ign)" behaviour. */
11054FNIEMOP_DEF(iemOp_fneni)
11055{
11056    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
11057    IEM_MC_BEGIN(0, 0, 0, 0);
11058    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11059    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11060    IEM_MC_ADVANCE_RIP_AND_FINISH();
11061    IEM_MC_END();
11062}
11063
11064
11065/** Opcode 0xdb 0xe1: FNDISI - 8087-only interrupt-disable; treated as a NOP
 * (beyond the #NM check) on later FPUs, per the "(8087/ign)" behaviour. */
11066FNIEMOP_DEF(iemOp_fndisi)
11067{
11068    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
11069    IEM_MC_BEGIN(0, 0, 0, 0);
11070    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11071    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11072    IEM_MC_ADVANCE_RIP_AND_FINISH();
11073    IEM_MC_END();
11074}
11075
11076
11077/** Opcode 0xdb 0xe2: FNCLEX - clear the FSW exception flags (and busy bit)
 * without checking for pending FPU exceptions first (no-wait form). */
11078FNIEMOP_DEF(iemOp_fnclex)
11079{
11080    IEMOP_MNEMONIC(fnclex, "fnclex");
11081    IEM_MC_BEGIN(0, 0, 0, 0);
11082    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11083    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11084    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
11085    IEM_MC_CLEAR_FSW_EX();
11086    IEM_MC_ADVANCE_RIP_AND_FINISH();
11087    IEM_MC_END();
11088}
11089
11090
11091/** Opcode 0xdb 0xe3: FNINIT - reinitialize the FPU; deferred to the C
 * implementation with fCheckXcpts=false (no-wait form). */
11092FNIEMOP_DEF(iemOp_fninit)
11093{
11094    IEMOP_MNEMONIC(fninit, "fninit");
11095    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11096    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, 0, iemCImpl_finit, false /*fCheckXcpts*/);
11097}
11098
11099
11100/** Opcode 0xdb 0xe4: FNSETPM - 80287-only "set protected mode"; ignored
 * (NOP beyond the #NM check) on later FPUs. */
11101FNIEMOP_DEF(iemOp_fnsetpm)
11102{
11103    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
11104    IEM_MC_BEGIN(0, 0, 0, 0);
11105    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11106    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11107    IEM_MC_ADVANCE_RIP_AND_FINISH();
11108    IEM_MC_END();
11109}
11110
11111
11112/** Opcode 0xdb 0xe5: FRSTPM - 80287XL-only "reset protected mode"; raises
 * \#UD here since newer CPUs do so (ignore-NOP variant kept disabled below). */
11113FNIEMOP_DEF(iemOp_frstpm)
11114{
11115    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
11116#if 0 /* #UDs on newer CPUs */
11117    IEM_MC_BEGIN(0, 0, 0, 0);
11118    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11119    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11120    IEM_MC_ADVANCE_RIP_AND_FINISH();
11121    IEM_MC_END();
11122    return VINF_SUCCESS;
11123#else
11124    IEMOP_RAISE_INVALID_OPCODE_RET();
11125#endif
11126}
11127
11128
11129/** Opcode 0xdb 11/5: FUCOMI st0,stN - unordered compare setting EFLAGS
 * (ZF/PF/CF), no pop; deferred to the shared fcomi/fucomi C implementation. */
11130FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
11131{
11132    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    /* Third argument packs the fPop flag (bit 16, here 0) with uFpuOpcode. */
11133    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
11134                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
11135                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
11136}
11137
11138
11139/** Opcode 0xdb 11/6: FCOMI st0,stN - ordered compare setting EFLAGS
 * (ZF/PF/CF), no pop; deferred to the shared fcomi/fucomi C implementation. */
11140FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
11141{
11142    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    /* Third argument packs the fPop flag (here false) with uFpuOpcode. */
11143    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
11144                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
11145                                false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
11146}
11147
11148
11149/**
11150 * @opcode 0xdb
 *
 * x87 escape group DB dispatcher.  Register form (mod=11) has the FCMOVNcc
 * family, the 0xe0-0xe7 control instructions (FNENI/FNDISI/FNCLEX/FNINIT/
 * FNSETPM/FRSTPM) and FUCOMI/FCOMI; memory form has the m32i loads/stores
 * and the m80r load/store, selected by the reg field.
11151 */
11152FNIEMOP_DEF(iemOp_EscF3)
11153{
11154    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FOP value (low 3 opcode bits + modrm) for FSW/FOP updates. */
11155    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
11156    if (IEM_IS_MODRM_REG_MODE(bRm))
11157    {
11158        switch (IEM_GET_MODRM_REG_8(bRm))
11159        {
11160            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
11161            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
11162            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
11163            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
11164            case 4:
                /* DB E0..E7: misc control instructions, dispatched on bRm. */
11165                switch (bRm)
11166                {
11167                    case 0xe0:  return FNIEMOP_CALL(iemOp_fneni);
11168                    case 0xe1:  return FNIEMOP_CALL(iemOp_fndisi);
11169                    case 0xe2:  return FNIEMOP_CALL(iemOp_fnclex);
11170                    case 0xe3:  return FNIEMOP_CALL(iemOp_fninit);
11171                    case 0xe4:  return FNIEMOP_CALL(iemOp_fnsetpm);
11172                    case 0xe5:  return FNIEMOP_CALL(iemOp_frstpm);
11173                    case 0xe6:  IEMOP_RAISE_INVALID_OPCODE_RET();
11174                    case 0xe7:  IEMOP_RAISE_INVALID_OPCODE_RET();
11175                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
11176                }
11177                break;
11178            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
11179            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
11180            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11181            IEM_NOT_REACHED_DEFAULT_CASE_RET();
11182        }
11183    }
11184    else
11185    {
11186        switch (IEM_GET_MODRM_REG_8(bRm))
11187        {
11188            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
11189            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
11190            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
11191            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
11192            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
11193            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
11194            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11195            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
11196            IEM_NOT_REACHED_DEFAULT_CASE_RET();
11197        }
11198    }
11199}
11200
11201
11202/**
11203 * Common worker for FPU instructions working on STn and ST0, and storing the
11204 * result in STn unless IE, DE or ZE was raised.
11205 *
11206 * Used by the DC-row register forms (FADD/FMUL/FSUB(R)/FDIV(R) stN,st0):
11207 * note the operand order - ST(i) is the first operand and the destination.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
11208 */
11209FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
11210{
11211    IEM_MC_BEGIN(3, 1, 0, 0);
11212    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11213    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11214    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11215    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11216    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
11217
11218    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11219    IEM_MC_MAYBE_RAISE_FPU_XCPT();
11220
11221    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be occupied; result goes into ST(i). */
11222    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
11223        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
11224        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
11225    } IEM_MC_ELSE() {
11226        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
11227    } IEM_MC_ENDIF();
11228    IEM_MC_ADVANCE_RIP_AND_FINISH();
11229
11230    IEM_MC_END();
11231}
11232
11233
11234/** Opcode 0xdc 11/0: FADD stN,st0 - ST(i) = ST(i) + ST(0). */
11235FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
11236{
11237    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
11238    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
11239}
11240
11241
11242/** Opcode 0xdc 11/1: FMUL stN,st0 - ST(i) = ST(i) * ST(0). */
11243FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
11244{
11245    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
11246    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
11247}
11248
11249
11250/** Opcode 0xdc 11/4: FSUBR stN,st0 - ST(i) = ST(0) - ST(i). */
11251FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
11252{
11253    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
11254    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
11255}
11256
11257
11258/** Opcode 0xdc 11/5: FSUB stN,st0 - ST(i) = ST(i) - ST(0). */
11259FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
11260{
11261    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
11262    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
11263}
11264
11265
11266/** Opcode 0xdc 11/6: FDIVR stN,st0 - ST(i) = ST(0) / ST(i). */
11267FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
11268{
11269    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
11270    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
11271}
11272
11273
11274/** Opcode 0xdc 11/7: FDIV stN,st0 - ST(i) = ST(i) / ST(0). */
11275FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
11276{
11277    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
11278    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
11279}
11280
11281
11282/**
11283 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
11284 * memory operand, and storing the result in ST0.
11285 *
11286 * Fetches the m64 real, applies the AIMPL to (ST(0), m64r) and writes the
11287 * result back to ST(0); stack underflow if ST(0) is empty.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
11288 */
11289FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
11290{
11291    IEM_MC_BEGIN(3, 3, 0, 0);
11292    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11293    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11294    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
11295    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11296    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
11297    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
11298
11299    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11300    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11301    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11302    IEM_MC_MAYBE_RAISE_FPU_XCPT();
11303
11304    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11305    IEM_MC_PREPARE_FPU_USAGE();
11306    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
11307        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
11308        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11309    } IEM_MC_ELSE() {
11310        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11311    } IEM_MC_ENDIF();
11312    IEM_MC_ADVANCE_RIP_AND_FINISH();
11313
11314    IEM_MC_END();
11315}
11316
11317
11318/** Opcode 0xdc !11/0: FADD m64r - ST(0) += (64-bit real from memory). */
11319FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
11320{
11321    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
11322    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
11323}
11324
11325
11326/** Opcode 0xdc !11/1: FMUL m64r - ST(0) *= (64-bit real from memory). */
11327FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
11328{
11329    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
11330    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
11331}
11332
11333
11334/** Opcode 0xdc !11/2: FCOM st0,m64r - compare ST(0) against a 64-bit real in
 * memory, updating only the FSW condition codes (no result store, no pop). */
11335FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
11336{
11337    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");
11338
11339    IEM_MC_BEGIN(3, 3, 0, 0);
11340    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11341    IEM_MC_LOCAL(uint16_t, u16Fsw);
11342    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
11343    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11344    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11345    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
11346
11347    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11348    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11349
11350    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11351    IEM_MC_MAYBE_RAISE_FPU_XCPT();
11352    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11353
11354    IEM_MC_PREPARE_FPU_USAGE();
11355    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11356        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
11357        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11358    } IEM_MC_ELSE() {
        /* UINT8_MAX: no destination register to flag for the underflow. */
11359        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11360    } IEM_MC_ENDIF();
11361    IEM_MC_ADVANCE_RIP_AND_FINISH();
11362
11363    IEM_MC_END();
11364}
11365
11366
11367/** Opcode 0xdc !11/3: FCOMP st0,m64r - same compare as FCOM m64r (shares
 * iemAImpl_fcom_r80_by_r64) but pops ST(0) afterwards. */
11368FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
11369{
11370    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");
11371
11372    IEM_MC_BEGIN(3, 3, 0, 0);
11373    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11374    IEM_MC_LOCAL(uint16_t, u16Fsw);
11375    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
11376    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11377    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11378    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
11379
11380    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11381    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11382
11383    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11384    IEM_MC_MAYBE_RAISE_FPU_XCPT();
11385    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11386
11387    IEM_MC_PREPARE_FPU_USAGE();
11388    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11389        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        /* _THEN_POP variants pop on both the success and underflow paths. */
11390        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11391    } IEM_MC_ELSE() {
11392        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11393    } IEM_MC_ENDIF();
11394    IEM_MC_ADVANCE_RIP_AND_FINISH();
11395
11396    IEM_MC_END();
11397}
11398
11399
11400/** Opcode 0xdc !11/4: FSUB m64r - ST(0) -= (64-bit real from memory). */
11401FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
11402{
11403    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
11404    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
11405}
11406
11407
/** Opcode 0xdc !11/5.
 * FSUBR m64r: ST(0) = m64 real - ST(0) (reversed operands). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
11414
11415
/** Opcode 0xdc !11/6.
 * FDIV m64r: ST(0) = ST(0) / m64 real. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
11422
11423
/** Opcode 0xdc !11/7.
 * FDIVR m64r: ST(0) = m64 real / ST(0) (reversed operands). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
11430
11431
11432/**
11433 * @opcode 0xdc
11434 */
11435FNIEMOP_DEF(iemOp_EscF4)
11436{
11437 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11438 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
11439 if (IEM_IS_MODRM_REG_MODE(bRm))
11440 {
11441 switch (IEM_GET_MODRM_REG_8(bRm))
11442 {
11443 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
11444 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
11445 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
11446 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
11447 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
11448 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
11449 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
11450 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
11451 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11452 }
11453 }
11454 else
11455 {
11456 switch (IEM_GET_MODRM_REG_8(bRm))
11457 {
11458 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
11459 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
11460 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
11461 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
11462 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
11463 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
11464 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
11465 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
11466 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11467 }
11468 }
11469}
11470
11471
/** Opcode 0xdd !11/0.
 * FLD m64r: convert a 64-bit real memory operand to 80-bit and push it.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires ST7 (the register that becomes the new top) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST7 occupied: record stack overflow instead of pushing. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11502
11503
/** Opcode 0xdd !11/1 (SSE3).
 * FISTTP m64i: store ST(0) as a 64-bit integer using truncation, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination up front so access faults are taken before FPU state changes. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11537
11538
/** Opcode 0xdd !11/2.
 * FST m64r: store ST(0) to memory as a 64-bit real (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination up front so access faults are taken before FPU state changes. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: with IM masked, store the real indefinite QNaN. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11572
11573
11574
11575
/** Opcode 0xdd !11/3.
 * FSTP m64r: store ST(0) to memory as a 64-bit real, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination up front so access faults are taken before FPU state changes. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: with IM masked, store the real indefinite QNaN; still pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11609
11610
/** Opcode 0xdd !11/4.
 * FRSTOR: restore the full FPU state (env + registers) from a 94/108 byte
 * memory image; deferred to a C implementation. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    /* The image layout depends on the effective operand size (16 vs 32/64 bit). */
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
11628
11629
/** Opcode 0xdd !11/6.
 * FNSAVE: save the full FPU state to a 94/108 byte memory image and then
 * reinitialize the FPU; deferred to a C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */

    /* The image layout depends on the effective operand size (16 vs 32/64 bit). */
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
11647
/** Opcode 0xdd !11/7.
 * FNSTSW m16: store the FPU status word to memory.  No-wait form, so no
 * IEM_MC_MAYBE_RAISE_FPU_XCPT here - only \#NM is checked. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2, 0, 0);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
11671
11672
/** Opcode 0xdd 11/0.
 * FFREE ST(i): tag the given register as empty without changing TOP. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11692
11693
/** Opcode 0xdd 11/2 (dispatched from case 2 in iemOp_EscF5).
 * FST ST(i): copy ST(0) into ST(i). */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Package ST0 as a result (FSW=0) and store it into ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11716
11717
/** Opcode 0xdd 11/4 (dispatched from case 4 in iemOp_EscF5).
 * FUCOM ST(i): unordered compare of ST(0) with ST(i). */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
11724
11725
/** Opcode 0xdd 11/5 (dispatched from case 5 in iemOp_EscF5).
 * FUCOMP ST(i): unordered compare of ST(0) with ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
11732
11733
11734/**
11735 * @opcode 0xdd
11736 */
11737FNIEMOP_DEF(iemOp_EscF5)
11738{
11739 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11740 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
11741 if (IEM_IS_MODRM_REG_MODE(bRm))
11742 {
11743 switch (IEM_GET_MODRM_REG_8(bRm))
11744 {
11745 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
11746 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
11747 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
11748 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
11749 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
11750 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
11751 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11752 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11753 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11754 }
11755 }
11756 else
11757 {
11758 switch (IEM_GET_MODRM_REG_8(bRm))
11759 {
11760 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
11761 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
11762 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
11763 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
11764 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
11765 case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
11766 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
11767 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
11768 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11769 }
11770 }
11771}
11772
11773
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0): ST(i) = ST(i) + ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
11780
11781
/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0): ST(i) = ST(i) * ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
11788
11789
/** Opcode 0xde 0xd9.
 * FCOMPP: compare ST(0) with ST(1), then pop both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
11796
11797
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0): ST(i) = ST(0) - ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
11804
11805
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0): ST(i) = ST(i) - ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
11812
11813
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0): ST(i) = ST(0) / ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
11820
11821
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0): ST(i) = ST(i) / ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
11828
11829
11830/**
11831 * Common worker for FPU instructions working on ST0 and an m16i, and storing
11832 * the result in ST0.
11833 *
11834 * @param bRm Mod R/M byte.
11835 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11836 */
11837FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
11838{
11839 IEM_MC_BEGIN(3, 3, 0, 0);
11840 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11841 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11842 IEM_MC_LOCAL(int16_t, i16Val2);
11843 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11844 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11845 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
11846
11847 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11849
11850 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11851 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11852 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11853
11854 IEM_MC_PREPARE_FPU_USAGE();
11855 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11856 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
11857 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
11858 } IEM_MC_ELSE() {
11859 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11860 } IEM_MC_ENDIF();
11861 IEM_MC_ADVANCE_RIP_AND_FINISH();
11862
11863 IEM_MC_END();
11864}
11865
11866
/** Opcode 0xde !11/0.
 * FIADD m16i: ST(0) = ST(0) + m16 integer. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
11873
11874
/** Opcode 0xde !11/1.
 * FIMUL m16i: ST(0) = ST(0) * m16 integer. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
11881
11882
/** Opcode 0xde !11/2.
 * FICOM m16i: compare ST(0) with an m16 integer, setting C0/C2/C3 in FSW. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: record stack underflow (no pop for FICOM). */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11914
11915
/** Opcode 0xde !11/3.
 * FICOMP m16i: compare ST(0) with an m16 integer, set C0/C2/C3, then pop. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        /* Same compare worker as FICOM; the _THEN_POP FSW update also pops ST0. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11947
11948
/** Opcode 0xde !11/4.
 * FISUB m16i: ST(0) = ST(0) - m16 integer. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
11955
11956
/** Opcode 0xde !11/5.
 * FISUBR m16i: ST(0) = m16 integer - ST(0) (reversed operands). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
11963
11964
/** Opcode 0xde !11/6.
 * FIDIV m16i: ST(0) = ST(0) / m16 integer. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
11971
11972
/** Opcode 0xde !11/7.
 * FIDIVR m16i: ST(0) = m16 integer / ST(0) (reversed operands). */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
11979
11980
11981/**
11982 * @opcode 0xde
11983 */
11984FNIEMOP_DEF(iemOp_EscF6)
11985{
11986 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11987 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
11988 if (IEM_IS_MODRM_REG_MODE(bRm))
11989 {
11990 switch (IEM_GET_MODRM_REG_8(bRm))
11991 {
11992 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
11993 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
11994 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
11995 case 3: if (bRm == 0xd9)
11996 return FNIEMOP_CALL(iemOp_fcompp);
11997 IEMOP_RAISE_INVALID_OPCODE_RET();
11998 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
11999 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
12000 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
12001 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
12002 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12003 }
12004 }
12005 else
12006 {
12007 switch (IEM_GET_MODRM_REG_8(bRm))
12008 {
12009 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
12010 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
12011 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
12012 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
12013 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
12014 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
12015 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
12016 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
12017 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12018 }
12019 }
12020}
12021
12022
/** Opcode 0xdf 11/0.
 * FFREEP ST(i): undocumented instruction, assumed to work like
 * FFREE + FINCSTP (frees the register and then increments TOP). */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12042
12043
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: store the FPU status word in AX.  No-wait form, so no
 * IEM_MC_MAYBE_RAISE_FPU_XCPT here - only \#NM is checked. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12058
12059
12060/** Opcode 0xdf 11/5. */
12061FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
12062{
12063 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
12064 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
12065 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
12066 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
12067}
12068
12069
/** Opcode 0xdf 11/6.
 * FCOMIP ST(0),ST(i): ordered compare setting ZF/PF/CF in EFLAGS, then pop.
 * Deferred to the shared fcomi/fucomi C implementation; bit 31 of the last
 * argument requests the pop, the low bits carry the FPU opcode word. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
12078
12079
/** Opcode 0xdf !11/0.
 * FILD m16i: convert a 16-bit signed integer memory operand to 80-bit real
 * and push it. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires ST7 (the register that becomes the new top) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12110
12111
/** Opcode 0xdf !11/1 (SSE3).
 * FISTTP m16i: store ST(0) as a 16-bit integer using truncation, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination up front so access faults are taken before FPU state changes. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: with IM masked, store the integer indefinite value; still pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12145
12146
/** Opcode 0xdf !11/2.
 * FIST m16i: store ST(0) as a 16-bit integer using the current rounding
 * mode (no pop). */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination up front so access faults are taken before FPU state changes. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12180
12181
/** Opcode 0xdf !11/3.
 * FISTP m16i: store ST(0) as a 16-bit integer using the current rounding
 * mode, then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination up front so access faults are taken before FPU state changes. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: with IM masked, store the integer indefinite value; still pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12215
12216
/** Opcode 0xdf !11/4.
 * FBLD m80bcd: convert an 80-bit packed BCD memory operand to 80-bit real
 * and push it. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires ST7 (the register that becomes the new top) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12247
12248
/** Opcode 0xdf !11/5.
 * FILD m64i: convert a 64-bit signed integer memory operand to 80-bit real
 * and push it. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires ST7 (the register that becomes the new top) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12279
12280
/** Opcode 0xdf !11/6.
 *
 * FBSTP m80bcd - converts ST(0) to an 80-bit packed BCD value, stores it to
 * memory and pops the FPU stack.  With an empty ST(0) and IM masked, an
 * indefinite BCD value is stored and a stack underflow is recorded.
 */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the 10-byte destination for writing before touching FPU state. */
    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        /* Commit only honours the store when FSW permits (pending exceptions). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): store BCD indefinite if invalid-op exceptions are masked. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12314
12315
/** Opcode 0xdf !11/7.
 *
 * FISTP m64int - converts ST(0) to a 64-bit signed integer, stores it to
 * memory and pops the FPU stack.  With an empty ST(0) and IM masked, the
 * integer-indefinite value (INT64_MIN) is stored and underflow is recorded.
 */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the 8-byte destination for writing before touching FPU state. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        /* Commit only honours the store when FSW permits (pending exceptions). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): store integer indefinite if invalid-op exceptions are masked. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12349
12350
12351/**
12352 * @opcode 0xdf
12353 */
12354FNIEMOP_DEF(iemOp_EscF7)
12355{
12356 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12357 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
12358 if (IEM_IS_MODRM_REG_MODE(bRm))
12359 {
12360 switch (IEM_GET_MODRM_REG_8(bRm))
12361 {
12362 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
12363 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
12364 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
12365 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
12366 case 4: if (bRm == 0xe0)
12367 return FNIEMOP_CALL(iemOp_fnstsw_ax);
12368 IEMOP_RAISE_INVALID_OPCODE_RET();
12369 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
12370 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
12371 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
12372 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12373 }
12374 }
12375 else
12376 {
12377 switch (IEM_GET_MODRM_REG_8(bRm))
12378 {
12379 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
12380 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
12381 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
12382 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
12383 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
12384 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
12385 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
12386 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
12387 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12388 }
12389 }
12390}
12391
12392
12393/**
12394 * @opcode 0xe0
12395 */
12396FNIEMOP_DEF(iemOp_loopne_Jb)
12397{
12398 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
12399 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12400 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12401
12402 switch (pVCpu->iem.s.enmEffAddrMode)
12403 {
12404 case IEMMODE_16BIT:
12405 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12407 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
12408 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12409 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12410 } IEM_MC_ELSE() {
12411 IEM_MC_ADVANCE_RIP_AND_FINISH();
12412 } IEM_MC_ENDIF();
12413 IEM_MC_END();
12414 break;
12415
12416 case IEMMODE_32BIT:
12417 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12419 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
12420 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12421 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12422 } IEM_MC_ELSE() {
12423 IEM_MC_ADVANCE_RIP_AND_FINISH();
12424 } IEM_MC_ENDIF();
12425 IEM_MC_END();
12426 break;
12427
12428 case IEMMODE_64BIT:
12429 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12431 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
12432 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12433 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12434 } IEM_MC_ELSE() {
12435 IEM_MC_ADVANCE_RIP_AND_FINISH();
12436 } IEM_MC_ENDIF();
12437 IEM_MC_END();
12438 break;
12439
12440 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12441 }
12442}
12443
12444
12445/**
12446 * @opcode 0xe1
12447 */
12448FNIEMOP_DEF(iemOp_loope_Jb)
12449{
12450 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
12451 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12452 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12453
12454 switch (pVCpu->iem.s.enmEffAddrMode)
12455 {
12456 case IEMMODE_16BIT:
12457 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12459 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
12460 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
12461 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12462 } IEM_MC_ELSE() {
12463 IEM_MC_ADVANCE_RIP_AND_FINISH();
12464 } IEM_MC_ENDIF();
12465 IEM_MC_END();
12466 break;
12467
12468 case IEMMODE_32BIT:
12469 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12471 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
12472 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
12473 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12474 } IEM_MC_ELSE() {
12475 IEM_MC_ADVANCE_RIP_AND_FINISH();
12476 } IEM_MC_ENDIF();
12477 IEM_MC_END();
12478 break;
12479
12480 case IEMMODE_64BIT:
12481 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12483 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
12484 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
12485 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12486 } IEM_MC_ELSE() {
12487 IEM_MC_ADVANCE_RIP_AND_FINISH();
12488 } IEM_MC_ENDIF();
12489 IEM_MC_END();
12490 break;
12491
12492 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12493 }
12494}
12495
12496
12497/**
12498 * @opcode 0xe2
12499 */
12500FNIEMOP_DEF(iemOp_loop_Jb)
12501{
12502 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
12503 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12504 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12505
12506 /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
12507 * using the 32-bit operand size override. How can that be restarted? See
12508 * weird pseudo code in intel manual. */
12509
12510 /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
12511 * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
12512 * the loop causes guest crashes, but when logging it's nice to skip a few million
12513 * lines of useless output. */
12514#if defined(LOG_ENABLED)
12515 if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
12516 switch (pVCpu->iem.s.enmEffAddrMode)
12517 {
12518 case IEMMODE_16BIT:
12519 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12521 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
12522 IEM_MC_ADVANCE_RIP_AND_FINISH();
12523 IEM_MC_END();
12524 break;
12525
12526 case IEMMODE_32BIT:
12527 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12529 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
12530 IEM_MC_ADVANCE_RIP_AND_FINISH();
12531 IEM_MC_END();
12532 break;
12533
12534 case IEMMODE_64BIT:
12535 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12537 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
12538 IEM_MC_ADVANCE_RIP_AND_FINISH();
12539 IEM_MC_END();
12540 break;
12541
12542 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12543 }
12544#endif
12545
12546 switch (pVCpu->iem.s.enmEffAddrMode)
12547 {
12548 case IEMMODE_16BIT:
12549 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12551 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
12552 IEM_MC_IF_CX_IS_NZ() {
12553 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12554 } IEM_MC_ELSE() {
12555 IEM_MC_ADVANCE_RIP_AND_FINISH();
12556 } IEM_MC_ENDIF();
12557 IEM_MC_END();
12558 break;
12559
12560 case IEMMODE_32BIT:
12561 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12563 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
12564 IEM_MC_IF_ECX_IS_NZ() {
12565 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12566 } IEM_MC_ELSE() {
12567 IEM_MC_ADVANCE_RIP_AND_FINISH();
12568 } IEM_MC_ENDIF();
12569 IEM_MC_END();
12570 break;
12571
12572 case IEMMODE_64BIT:
12573 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12575 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
12576 IEM_MC_IF_RCX_IS_NZ() {
12577 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12578 } IEM_MC_ELSE() {
12579 IEM_MC_ADVANCE_RIP_AND_FINISH();
12580 } IEM_MC_ENDIF();
12581 IEM_MC_END();
12582 break;
12583
12584 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12585 }
12586}
12587
12588
12589/**
12590 * @opcode 0xe3
12591 */
12592FNIEMOP_DEF(iemOp_jecxz_Jb)
12593{
12594 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
12595 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12596 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12597
12598 switch (pVCpu->iem.s.enmEffAddrMode)
12599 {
12600 case IEMMODE_16BIT:
12601 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12603 IEM_MC_IF_CX_IS_NZ() {
12604 IEM_MC_ADVANCE_RIP_AND_FINISH();
12605 } IEM_MC_ELSE() {
12606 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12607 } IEM_MC_ENDIF();
12608 IEM_MC_END();
12609 break;
12610
12611 case IEMMODE_32BIT:
12612 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12614 IEM_MC_IF_ECX_IS_NZ() {
12615 IEM_MC_ADVANCE_RIP_AND_FINISH();
12616 } IEM_MC_ELSE() {
12617 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12618 } IEM_MC_ENDIF();
12619 IEM_MC_END();
12620 break;
12621
12622 case IEMMODE_64BIT:
12623 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12625 IEM_MC_IF_RCX_IS_NZ() {
12626 IEM_MC_ADVANCE_RIP_AND_FINISH();
12627 } IEM_MC_ELSE() {
12628 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12629 } IEM_MC_ENDIF();
12630 IEM_MC_END();
12631 break;
12632
12633 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12634 }
12635}
12636
12637
/** Opcode 0xe4
 * IN AL,Ib - reads one byte from the immediate port into AL.  Deferred to
 * iemCImpl_in; the second argument marks xAX as written for the recompiler. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12647
12648
/** Opcode 0xe5
 * IN eAX,Ib - reads a word/dword (per operand size) from the immediate port
 * into AX/EAX.  Deferred to iemCImpl_in; xAX is marked as written. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12659
12660
/** Opcode 0xe6
 * OUT Ib,AL - writes AL to the immediate port.  Deferred to iemCImpl_out;
 * no guest registers are modified. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12670
12671
/** Opcode 0xe7
 * OUT Ib,eAX - writes AX/EAX (per operand size) to the immediate port.
 * Deferred to iemCImpl_out; no guest registers are modified. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12682
12683
12684/**
12685 * @opcode 0xe8
12686 */
12687FNIEMOP_DEF(iemOp_call_Jv)
12688{
12689 IEMOP_MNEMONIC(call_Jv, "call Jv");
12690 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12691 switch (pVCpu->iem.s.enmEffOpSize)
12692 {
12693 case IEMMODE_16BIT:
12694 {
12695 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12696 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
12697 iemCImpl_call_rel_16, (int16_t)u16Imm);
12698 }
12699
12700 case IEMMODE_32BIT:
12701 {
12702 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12703 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
12704 iemCImpl_call_rel_32, (int32_t)u32Imm);
12705 }
12706
12707 case IEMMODE_64BIT:
12708 {
12709 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
12710 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
12711 iemCImpl_call_rel_64, u64Imm);
12712 }
12713
12714 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12715 }
12716}
12717
12718
12719/**
12720 * @opcode 0xe9
12721 */
12722FNIEMOP_DEF(iemOp_jmp_Jv)
12723{
12724 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
12725 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12726 switch (pVCpu->iem.s.enmEffOpSize)
12727 {
12728 case IEMMODE_16BIT:
12729 IEM_MC_BEGIN(0, 0, 0, 0);
12730 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
12731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12732 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
12733 IEM_MC_END();
12734 break;
12735
12736 case IEMMODE_64BIT:
12737 case IEMMODE_32BIT:
12738 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12739 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
12740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12741 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
12742 IEM_MC_END();
12743 break;
12744
12745 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12746 }
12747}
12748
12749
12750/**
12751 * @opcode 0xea
12752 */
12753FNIEMOP_DEF(iemOp_jmp_Ap)
12754{
12755 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
12756 IEMOP_HLP_NO_64BIT();
12757
12758 /* Decode the far pointer address and pass it on to the far call C implementation. */
12759 uint32_t off32Seg;
12760 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
12761 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
12762 else
12763 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
12764 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
12765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12766 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
12767 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
12768 iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
12769 /** @todo make task-switches, ring-switches, ++ return non-zero status */
12770}
12771
12772
12773/**
12774 * @opcode 0xeb
12775 */
12776FNIEMOP_DEF(iemOp_jmp_Jb)
12777{
12778 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
12779 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12780 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12781
12782 IEM_MC_BEGIN(0, 0, 0, 0);
12783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12784 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12785 IEM_MC_END();
12786}
12787
12788
/** Opcode 0xec
 * IN AL,DX - reads one byte from the port in DX into AL.  Deferred to
 * iemCImpl_in_eAX_DX; xAX is marked as written for the recompiler. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12798
12799
/** Opcode 0xed
 * IN eAX,DX - reads a word/dword (per operand size) from the port in DX
 * into AX/EAX.  Deferred to iemCImpl_in_eAX_DX; xAX is marked as written. */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12810
12811
/** Opcode 0xee
 * OUT DX,AL - writes AL to the port in DX.  Deferred to iemCImpl_out_DX_eAX;
 * no guest registers are modified. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12820
12821
/** Opcode 0xef
 * OUT DX,eAX - writes AX/EAX (per operand size) to the port in DX.
 * Deferred to iemCImpl_out_DX_eAX; no guest registers are modified. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12831
12832
12833/**
12834 * @opcode 0xf0
12835 */
12836FNIEMOP_DEF(iemOp_lock)
12837{
12838 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
12839 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12840 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
12841
12842 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12843 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12844}
12845
12846
12847/**
12848 * @opcode 0xf1
12849 */
12850FNIEMOP_DEF(iemOp_int1)
12851{
12852 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
12853 /** @todo Does not generate \#UD on 286, or so they say... Was allegedly a
12854 * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
12855 * LOADALL memo. Needs some testing. */
12856 IEMOP_HLP_MIN_386();
12857 /** @todo testcase! */
12858 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
12859 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
12860 iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
12861}
12862
12863
12864/**
12865 * @opcode 0xf2
12866 */
12867FNIEMOP_DEF(iemOp_repne)
12868{
12869 /* This overrides any previous REPE prefix. */
12870 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
12871 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
12872 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
12873
12874 /* For the 4 entry opcode tables, REPNZ overrides any previous
12875 REPZ and operand size prefixes. */
12876 pVCpu->iem.s.idxPrefix = 3;
12877
12878 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12879 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12880}
12881
12882
12883/**
12884 * @opcode 0xf3
12885 */
12886FNIEMOP_DEF(iemOp_repe)
12887{
12888 /* This overrides any previous REPNE prefix. */
12889 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
12890 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
12891 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
12892
12893 /* For the 4 entry opcode tables, REPNZ overrides any previous
12894 REPNZ and operand size prefixes. */
12895 pVCpu->iem.s.idxPrefix = 2;
12896
12897 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12898 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12899}
12900
12901
12902/**
12903 * @opcode 0xf4
12904 */
12905FNIEMOP_DEF(iemOp_hlt)
12906{
12907 IEMOP_MNEMONIC(hlt, "hlt");
12908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12909 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_hlt);
12910}
12911
12912
12913/**
12914 * @opcode 0xf5
12915 */
12916FNIEMOP_DEF(iemOp_cmc)
12917{
12918 IEMOP_MNEMONIC(cmc, "cmc");
12919 IEM_MC_BEGIN(0, 0, 0, 0);
12920 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12921 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
12922 IEM_MC_ADVANCE_RIP_AND_FINISH();
12923 IEM_MC_END();
12924}
12925
12926
12927/**
12928 * Body for of 'inc/dec/not/neg Eb'.
12929 */
12930#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
12931 if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
12932 { \
12933 /* register access */ \
12934 IEM_MC_BEGIN(2, 0, 0, 0); \
12935 IEMOP_HLP_DONE_DECODING(); \
12936 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12937 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
12938 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
12939 IEM_MC_REF_EFLAGS(pEFlags); \
12940 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
12941 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12942 IEM_MC_END(); \
12943 } \
12944 else \
12945 { \
12946 /* memory access. */ \
12947 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
12948 { \
12949 IEM_MC_BEGIN(2, 2, 0, 0); \
12950 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12951 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
12952 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12953 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12954 \
12955 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
12956 IEMOP_HLP_DONE_DECODING(); \
12957 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12958 IEM_MC_FETCH_EFLAGS(EFlags); \
12959 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
12960 \
12961 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
12962 IEM_MC_COMMIT_EFLAGS(EFlags); \
12963 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12964 IEM_MC_END(); \
12965 } \
12966 else \
12967 { \
12968 IEM_MC_BEGIN(2, 2, 0, 0); \
12969 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12970 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
12971 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12972 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12973 \
12974 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
12975 IEMOP_HLP_DONE_DECODING(); \
12976 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12977 IEM_MC_FETCH_EFLAGS(EFlags); \
12978 IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
12979 \
12980 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
12981 IEM_MC_COMMIT_EFLAGS(EFlags); \
12982 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12983 IEM_MC_END(); \
12984 } \
12985 } \
12986 (void)0
12987
12988
12989/**
12990 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
12991 */
12992#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
12993 if (IEM_IS_MODRM_REG_MODE(bRm)) \
12994 { \
12995 /* \
12996 * Register target \
12997 */ \
12998 switch (pVCpu->iem.s.enmEffOpSize) \
12999 { \
13000 case IEMMODE_16BIT: \
13001 IEM_MC_BEGIN(2, 0, 0, 0); \
13002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13003 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
13004 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13005 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13006 IEM_MC_REF_EFLAGS(pEFlags); \
13007 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
13008 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13009 IEM_MC_END(); \
13010 break; \
13011 \
13012 case IEMMODE_32BIT: \
13013 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0); \
13014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13015 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
13016 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13017 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13018 IEM_MC_REF_EFLAGS(pEFlags); \
13019 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
13020 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
13021 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13022 IEM_MC_END(); \
13023 break; \
13024 \
13025 case IEMMODE_64BIT: \
13026 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0); \
13027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13028 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
13029 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13030 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13031 IEM_MC_REF_EFLAGS(pEFlags); \
13032 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
13033 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13034 IEM_MC_END(); \
13035 break; \
13036 \
13037 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13038 } \
13039 } \
13040 else \
13041 { \
13042 /* \
13043 * Memory target. \
13044 */ \
13045 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
13046 { \
13047 switch (pVCpu->iem.s.enmEffOpSize) \
13048 { \
13049 case IEMMODE_16BIT: \
13050 IEM_MC_BEGIN(2, 3, 0, 0); \
13051 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
13052 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13053 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13054 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13055 \
13056 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13058 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13059 IEM_MC_FETCH_EFLAGS(EFlags); \
13060 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
13061 \
13062 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
13063 IEM_MC_COMMIT_EFLAGS(EFlags); \
13064 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13065 IEM_MC_END(); \
13066 break; \
13067 \
13068 case IEMMODE_32BIT: \
13069 IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
13070 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
13071 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13072 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13073 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13074 \
13075 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13077 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13078 IEM_MC_FETCH_EFLAGS(EFlags); \
13079 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
13080 \
13081 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
13082 IEM_MC_COMMIT_EFLAGS(EFlags); \
13083 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13084 IEM_MC_END(); \
13085 break; \
13086 \
13087 case IEMMODE_64BIT: \
13088 IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
13089 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
13090 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13091 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13092 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13093 \
13094 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13096 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13097 IEM_MC_FETCH_EFLAGS(EFlags); \
13098 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
13099 \
13100 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
13101 IEM_MC_COMMIT_EFLAGS(EFlags); \
13102 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13103 IEM_MC_END(); \
13104 break; \
13105 \
13106 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13107 } \
13108 } \
13109 else \
13110 { \
13111 (void)0
13112
/**
 * Locked memory-operand tail for 'inc/dec/not/neg Ev' (groups 3 and 5).
 *
 * Companion of IEMOP_BODY_UNARY_Ev: it MUST directly follow that macro, as
 * it provides the locked a_fnLockedU16/32/64 dispatch and closes the braces
 * the first macro left open.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
13178
13179
13180/**
13181 * @opmaps grp3_f6
13182 * @opcode /0
13183 * @todo also /1
13184 */
13185FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
13186{
13187 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
13188 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
13189
13190 if (IEM_IS_MODRM_REG_MODE(bRm))
13191 {
13192 /* register access */
13193 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13194 IEM_MC_BEGIN(3, 0, 0, 0);
13195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13196 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13197 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
13198 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13199 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
13200 IEM_MC_REF_EFLAGS(pEFlags);
13201 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
13202 IEM_MC_ADVANCE_RIP_AND_FINISH();
13203 IEM_MC_END();
13204 }
13205 else
13206 {
13207 /* memory access. */
13208 IEM_MC_BEGIN(3, 3, 0, 0);
13209 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13210 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13211
13212 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13214
13215 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
13216 IEM_MC_ARG(uint8_t const *, pu8Dst, 0);
13217 IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13218
13219 IEM_MC_ARG_CONST(uint8_t, u8Src, u8Imm, 1);
13220 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
13221 IEM_MC_FETCH_EFLAGS(EFlags);
13222 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
13223
13224 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo);
13225 IEM_MC_COMMIT_EFLAGS(EFlags);
13226 IEM_MC_ADVANCE_RIP_AND_FINISH();
13227 IEM_MC_END();
13228 }
13229}
13230
13231
/** Opcode 0xf6 /4, /5, /6 and /7 (mul, imul, div, idiv with byte operand).
 *
 * All four share this worker: the implicit destination is AX and the
 * operation is supplied as the 8-bit mul/div assembly helper @a pfnU8.
 * The helper returns non-zero to request a \#DE (divide error) exception.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEM_MC_BEGIN(3, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2, 0, 0);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
13282
13283
/** Opcode 0xf7 /4, /5, /6 and /7 (mul, imul, div, idiv with word/dword/qword
 *  operand).
 *
 * The implicit destination is the DX:AX / EDX:EAX / RDX:RAX register pair;
 * the operation comes in via the size-indexed helper table @a pImpl.  The
 * helper returns non-zero to request a \#DE (divide error) exception.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit writes to EAX/EDX zero the high halves of RAX/RDX, but only on success. */
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2, 0, 0);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit writes to EAX/EDX zero the high halves of RAX/RDX, but only on success. */
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13454
13455
13456/**
13457 * @opmaps grp3_f6
13458 * @opcode /2
13459 */
13460FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
13461{
13462 IEMOP_MNEMONIC(not_Eb, "not Eb");
13463 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
13464}
13465
13466
13467/**
13468 * @opmaps grp3_f6
13469 * @opcode /3
13470 */
13471FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
13472{
13473 IEMOP_MNEMONIC(net_Eb, "neg Eb");
13474 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
13475}
13476
13477
13478/**
13479 * @opcode 0xf6
13480 */
13481FNIEMOP_DEF(iemOp_Grp3_Eb)
13482{
13483 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13484 switch (IEM_GET_MODRM_REG_8(bRm))
13485 {
13486 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
13487 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
13488 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
13489 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
13490 case 4:
13491 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
13492 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
13493 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
13494 case 5:
13495 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
13496 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
13497 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
13498 case 6:
13499 IEMOP_MNEMONIC(div_Eb, "div Eb");
13500 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
13501 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
13502 case 7:
13503 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
13504 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
13505 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
13506 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13507 }
13508}
13509
13510
/** Opcode 0xf7 /0.
 *
 * TEST Ev,Iv - AND a word/dword/qword operand with an immediate, set flags,
 * discard the result.  In 64-bit mode the immediate is 32 bits, sign-extended
 * to 64.  Nothing is written back, so memory operands are mapped read-only
 * and the LOCK prefix is rejected.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended to 64 bits. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 immediate bytes follow the ModR/M bytes. */

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint16_t const *, pu16Dst, 0);
                IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 immediate bytes follow the ModR/M bytes. */

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint32_t const *, pu32Dst, 0);
                IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* still only 4 immediate bytes in 64-bit mode (sign-extended). */

                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_ARG(uint64_t const *, pu64Dst, 0);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13646
13647
/** Opcode 0xf7 /2.
 *
 * NOT Ev - one's complement; the two body macros emit the plain and the
 * LOCK-prefixed (atomic) decoder paths for all three operand sizes.
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_not_u16, iemAImpl_not_u32, iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
13655
13656
/** Opcode 0xf7 /3.
 *
 * NEG Ev - two's complement negation; the two body macros emit the plain and
 * the LOCK-prefixed (atomic) decoder paths for all three operand sizes.
 */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_neg_u16, iemAImpl_neg_u32, iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
13664
13665
13666/**
13667 * @opcode 0xf7
13668 */
13669FNIEMOP_DEF(iemOp_Grp3_Ev)
13670{
13671 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13672 switch (IEM_GET_MODRM_REG_8(bRm))
13673 {
13674 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
13675 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
13676 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
13677 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
13678 case 4:
13679 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
13680 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
13681 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
13682 case 5:
13683 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
13684 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
13685 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
13686 case 6:
13687 IEMOP_MNEMONIC(div_Ev, "div Ev");
13688 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
13689 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
13690 case 7:
13691 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
13692 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
13693 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
13694 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13695 }
13696}
13697
13698
13699/**
13700 * @opcode 0xf8
13701 */
13702FNIEMOP_DEF(iemOp_clc)
13703{
13704 IEMOP_MNEMONIC(clc, "clc");
13705 IEM_MC_BEGIN(0, 0, 0, 0);
13706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13707 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
13708 IEM_MC_ADVANCE_RIP_AND_FINISH();
13709 IEM_MC_END();
13710}
13711
13712
13713/**
13714 * @opcode 0xf9
13715 */
13716FNIEMOP_DEF(iemOp_stc)
13717{
13718 IEMOP_MNEMONIC(stc, "stc");
13719 IEM_MC_BEGIN(0, 0, 0, 0);
13720 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13721 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
13722 IEM_MC_ADVANCE_RIP_AND_FINISH();
13723 IEM_MC_END();
13724}
13725
13726
13727/**
13728 * @opcode 0xfa
13729 */
13730FNIEMOP_DEF(iemOp_cli)
13731{
13732 IEMOP_MNEMONIC(cli, "cli");
13733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13734 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, 0, iemCImpl_cli);
13735}
13736
13737
/* Opcode 0xfb - STI, set the interrupt flag.  Deferred to a C implementation
   for the privilege/IOPL checks and the one-instruction interrupt shadow. */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(  IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
                                | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, 0, iemCImpl_sti);
}
13745
13746
13747/**
13748 * @opcode 0xfc
13749 */
13750FNIEMOP_DEF(iemOp_cld)
13751{
13752 IEMOP_MNEMONIC(cld, "cld");
13753 IEM_MC_BEGIN(0, 0, 0, 0);
13754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13755 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
13756 IEM_MC_ADVANCE_RIP_AND_FINISH();
13757 IEM_MC_END();
13758}
13759
13760
13761/**
13762 * @opcode 0xfd
13763 */
13764FNIEMOP_DEF(iemOp_std)
13765{
13766 IEMOP_MNEMONIC(std, "std");
13767 IEM_MC_BEGIN(0, 0, 0, 0);
13768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13769 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
13770 IEM_MC_ADVANCE_RIP_AND_FINISH();
13771 IEM_MC_END();
13772}
13773
13774
13775/**
13776 * @opmaps grp4
13777 * @opcode /0
13778 */
13779FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
13780{
13781 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
13782 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
13783}
13784
13785
13786/**
13787 * @opmaps grp4
13788 * @opcode /1
13789 */
13790FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
13791{
13792 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
13793 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
13794}
13795
13796
13797/**
13798 * @opcode 0xfe
13799 */
13800FNIEMOP_DEF(iemOp_Grp4)
13801{
13802 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13803 switch (IEM_GET_MODRM_REG_8(bRm))
13804 {
13805 case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
13806 case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
13807 default:
13808 /** @todo is the eff-addr decoded? */
13809 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
13810 IEMOP_RAISE_INVALID_OPCODE_RET();
13811 }
13812}
13813
/** Opcode 0xff /0.
 *
 * INC Ev - word/dword/qword increment; plain and LOCK-prefixed (atomic)
 * decoder paths come from the two body macros.
 */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_inc_u16, iemAImpl_inc_u32, iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
13821
13822
/** Opcode 0xff /1.
 *
 * DEC Ev - word/dword/qword decrement; plain and LOCK-prefixed (atomic)
 * decoder paths come from the two body macros.
 */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_dec_u16, iemAImpl_dec_u32, iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
13830
13831
13832/**
13833 * Opcode 0xff /2.
13834 * @param bRm The RM byte.
13835 */
13836FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
13837{
13838 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
13839 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13840
13841 if (IEM_IS_MODRM_REG_MODE(bRm))
13842 {
13843 /* The new RIP is taken from a register. */
13844 switch (pVCpu->iem.s.enmEffOpSize)
13845 {
13846 case IEMMODE_16BIT:
13847 IEM_MC_BEGIN(1, 0, 0, 0);
13848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13849 IEM_MC_ARG(uint16_t, u16Target, 0);
13850 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13851 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
13852 IEM_MC_END();
13853 break;
13854
13855 case IEMMODE_32BIT:
13856 IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
13857 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13858 IEM_MC_ARG(uint32_t, u32Target, 0);
13859 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13860 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
13861 IEM_MC_END();
13862 break;
13863
13864 case IEMMODE_64BIT:
13865 IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
13866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13867 IEM_MC_ARG(uint64_t, u64Target, 0);
13868 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13869 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
13870 IEM_MC_END();
13871 break;
13872
13873 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13874 }
13875 }
13876 else
13877 {
13878 /* The new RIP is taken from a register. */
13879 switch (pVCpu->iem.s.enmEffOpSize)
13880 {
13881 case IEMMODE_16BIT:
13882 IEM_MC_BEGIN(1, 1, 0, 0);
13883 IEM_MC_ARG(uint16_t, u16Target, 0);
13884 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13885 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13887 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13888 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
13889 IEM_MC_END();
13890 break;
13891
13892 case IEMMODE_32BIT:
13893 IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
13894 IEM_MC_ARG(uint32_t, u32Target, 0);
13895 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13898 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13899 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
13900 IEM_MC_END();
13901 break;
13902
13903 case IEMMODE_64BIT:
13904 IEM_MC_BEGIN(1, 1, IEM_MC_F_64BIT, 0);
13905 IEM_MC_ARG(uint64_t, u64Target, 0);
13906 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13907 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13909 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13910 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
13911 IEM_MC_END();
13912 break;
13913
13914 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13915 }
13916 }
13917}
13918
/* Shared body for grp5 /3 (callf Ep) and /5 (jmpf Ep): loads a far pointer
   (offset followed by a 16-bit selector) from memory and hands it to the C
   implementation a_fnCImpl.  Register operands are invalid (\#UD), and in
   64-bit mode a REX.W-sized operand is only honoured on Intel CPUs. */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl, a_fCImplExtra) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1, 0, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint16_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint32_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint64_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE /* no gates */, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
13987
13988
13989/**
13990 * Opcode 0xff /3.
13991 * @param bRm The RM byte.
13992 */
13993FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
13994{
13995 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
13996 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf, IEM_CIMPL_F_BRANCH_STACK);
13997}
13998
13999
14000/**
14001 * Opcode 0xff /4.
14002 * @param bRm The RM byte.
14003 */
14004FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
14005{
14006 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
14007 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
14008
14009 if (IEM_IS_MODRM_REG_MODE(bRm))
14010 {
14011 /* The new RIP is taken from a register. */
14012 switch (pVCpu->iem.s.enmEffOpSize)
14013 {
14014 case IEMMODE_16BIT:
14015 IEM_MC_BEGIN(0, 1, 0, 0);
14016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14017 IEM_MC_LOCAL(uint16_t, u16Target);
14018 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14019 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
14020 IEM_MC_END();
14021 break;
14022
14023 case IEMMODE_32BIT:
14024 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
14025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14026 IEM_MC_LOCAL(uint32_t, u32Target);
14027 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14028 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
14029 IEM_MC_END();
14030 break;
14031
14032 case IEMMODE_64BIT:
14033 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
14034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14035 IEM_MC_LOCAL(uint64_t, u64Target);
14036 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14037 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
14038 IEM_MC_END();
14039 break;
14040
14041 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14042 }
14043 }
14044 else
14045 {
14046 /* The new RIP is taken from a memory location. */
14047 switch (pVCpu->iem.s.enmEffOpSize)
14048 {
14049 case IEMMODE_16BIT:
14050 IEM_MC_BEGIN(0, 2, 0, 0);
14051 IEM_MC_LOCAL(uint16_t, u16Target);
14052 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14053 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14054 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14055 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14056 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
14057 IEM_MC_END();
14058 break;
14059
14060 case IEMMODE_32BIT:
14061 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
14062 IEM_MC_LOCAL(uint32_t, u32Target);
14063 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14064 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14066 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14067 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
14068 IEM_MC_END();
14069 break;
14070
14071 case IEMMODE_64BIT:
14072 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
14073 IEM_MC_LOCAL(uint64_t, u64Target);
14074 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14075 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14077 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14078 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
14079 IEM_MC_END();
14080 break;
14081
14082 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14083 }
14084 }
14085}
14086
14087
14088/**
14089 * Opcode 0xff /5.
14090 * @param bRm The RM byte.
14091 */
14092FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
14093{
14094 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
14095 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp, 0);
14096}
14097
14098
14099/**
14100 * Opcode 0xff /6.
14101 * @param bRm The RM byte.
14102 */
14103FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
14104{
14105 IEMOP_MNEMONIC(push_Ev, "push Ev");
14106
14107 /* Registers are handled by a common worker. */
14108 if (IEM_IS_MODRM_REG_MODE(bRm))
14109 return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
14110
14111 /* Memory we do here. */
14112 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
14113 switch (pVCpu->iem.s.enmEffOpSize)
14114 {
14115 case IEMMODE_16BIT:
14116 IEM_MC_BEGIN(0, 2, 0, 0);
14117 IEM_MC_LOCAL(uint16_t, u16Src);
14118 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14119 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14121 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14122 IEM_MC_PUSH_U16(u16Src);
14123 IEM_MC_ADVANCE_RIP_AND_FINISH();
14124 IEM_MC_END();
14125 break;
14126
14127 case IEMMODE_32BIT:
14128 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
14129 IEM_MC_LOCAL(uint32_t, u32Src);
14130 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14131 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14133 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14134 IEM_MC_PUSH_U32(u32Src);
14135 IEM_MC_ADVANCE_RIP_AND_FINISH();
14136 IEM_MC_END();
14137 break;
14138
14139 case IEMMODE_64BIT:
14140 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
14141 IEM_MC_LOCAL(uint64_t, u64Src);
14142 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14143 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14144 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14145 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14146 IEM_MC_PUSH_U64(u64Src);
14147 IEM_MC_ADVANCE_RIP_AND_FINISH();
14148 IEM_MC_END();
14149 break;
14150
14151 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14152 }
14153}
14154
14155
14156/**
14157 * @opcode 0xff
14158 */
14159FNIEMOP_DEF(iemOp_Grp5)
14160{
14161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14162 switch (IEM_GET_MODRM_REG_8(bRm))
14163 {
14164 case 0:
14165 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
14166 case 1:
14167 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
14168 case 2:
14169 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
14170 case 3:
14171 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
14172 case 4:
14173 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
14174 case 5:
14175 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
14176 case 6:
14177 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
14178 case 7:
14179 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
14180 IEMOP_RAISE_INVALID_OPCODE_RET();
14181 }
14182 AssertFailedReturn(VERR_IEM_IPE_3);
14183}
14184
14185
14186
/**
 * The one-byte opcode dispatch table.
 *
 * One decoder function per primary opcode byte 0x00..0xff; 0x0f chains to the
 * two-byte escape table.  Declared extern near the top of this file since it
 * needs forward declaring (prefix handlers re-enter it).
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex3, iemOp_lds_Gv_Mp__vex2, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_in_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
14254
14255
14256/** @} */
14257