VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 102033

Last change on this file since 102033 was 102033, checked in by vboxsync, 13 months ago

VMM/IEM: Fixed some inconsistencies wrt input for IEM_MC_PUSH_*. bugref:10371

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 532.0 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 102033 2023-11-09 13:16:34Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
/**
 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
 * memory/register as the destination (destination is read-write mapped).
 *
 * Decodes the ModR/M byte itself.  The register-destination form and the
 * unlocked memory-destination form are emitted completely here, both calling
 * a_fnNormalU8(pu8Dst, u8Src, pEFlags).  For the LOCK-prefixed memory form
 * this macro deliberately leaves two scopes open (note the unbalanced braces
 * ending in "(void)0"); they MUST be closed by appending either
 * IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED.
 *
 * @param   a_fnNormalU8    The non-locked worker, signature
 *                          (uint8_t *pu8Dst, uint8_t u8Src, uint32_t *pEFlags).
 */
#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
            IEM_MC_ARG(uint8_t,    u8Src,   1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
118
/**
 * Body for instructions like TEST &amp; CMP, ++ with a byte memory/registers as
 * operands (destination is only read, never written back).
 *
 * Same shape as IEMOP_BODY_BINARY_rm_r8_RW, but the memory operand is mapped
 * read-only (uint8_t const *, IEM_MC_MEM_MAP_U8_RO) since only EFLAGS are
 * updated.  Like the RW variant it leaves two scopes open for the
 * LOCK-prefixed case; close them with IEMOP_BODY_BINARY_rm_r8_NO_LOCK or
 * IEMOP_BODY_BINARY_rm_r8_LOCKED.
 *
 * @param   a_fnNormalU8    The worker, signature
 *                          (uint8_t *pu8Dst, uint8_t u8Src, uint32_t *pEFlags).
 */
#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,  0); \
            IEM_MC_ARG(uint8_t,         u8Src,   1); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
178
/**
 * Tail for IEMOP_BODY_BINARY_rm_r8_RW / _RO when the instruction does NOT
 * support the LOCK prefix: finishes decoding and raises the invalid-lock-
 * prefix exception, then closes the two scopes the body macro left open.
 */
#define IEMOP_BODY_BINARY_rm_r8_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
185
/**
 * Tail for IEMOP_BODY_BINARY_rm_r8_RW when the instruction supports the LOCK
 * prefix: emits the locked memory-destination variant calling a_fnLockedU8
 * and closes the two scopes the body macro left open.
 *
 * @param   a_fnLockedU8    The locked worker, signature
 *                          (uint8_t *pu8Dst, uint8_t u8Src, uint32_t *pEFlags).
 */
#define IEMOP_BODY_BINARY_rm_r8_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
            IEM_MC_ARG(uint8_t,    u8Src,   1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); /* renamed from bMapInfoDst for consistency with the sibling body macros */ \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
208
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination (Gb, Eb encoding).
 *
 * Decodes the ModR/M byte itself.  This macro is self-contained (no tail
 * macro needed): since the destination is always a register, a LOCK prefix
 * is rejected on both paths via IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX.
 *
 * @param   a_fnNormalU8    The worker, signature
 *                          (uint8_t *pu8Dst, uint8_t u8Src, uint32_t *pEFlags).
 */
#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory (source operand only, so a plain fetch suffices). \
         */ \
        IEM_MC_BEGIN(3, 1, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
257
258
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination (read-write access).
 *
 * Decodes the ModR/M byte and switches on the effective operand size
 * (16/32/64 bits), calling the matching a_fnNormalU* worker.  In the 32-bit
 * register-destination case the upper half of the 64-bit GPR is explicitly
 * cleared (IEM_MC_CLEAR_HIGH_GREG_U64), per the x86-64 zero-extension rule.
 *
 * Like IEMOP_BODY_BINARY_rm_r8_RW this leaves two scopes open for the
 * LOCK-prefixed memory form; close them with IEMOP_BODY_BINARY_rm_rv_LOCKED
 * (note the "(void)0" marking the open tail).
 *
 * @param   a_fnNormalU16   Worker for 16-bit operands.
 * @param   a_fnNormalU32   Worker for 32-bit operands.
 * @param   a_fnNormalU64   Worker for 64-bit operands.
 */
#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t,   u16Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t,   u32Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t,   u64Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * Tail for IEMOP_BODY_BINARY_rm_rv_RW when the instruction supports the LOCK
 * prefix: emits the locked memory-destination variants for all three operand
 * sizes and closes the two scopes the body macro left open.
 *
 * @param   a_fnLockedU16   Locked worker for 16-bit operands.
 * @param   a_fnLockedU32   Locked worker for 32-bit operands.
 * @param   a_fnLockedU64   Locked worker for 64-bit operands.
 */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t,   u16Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t,   u32Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t,   u64Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
477
/**
 * Body for read-only word/dword/qword instructions like TEST and CMP with
 * memory/register as the destination (destination only read; only EFLAGS
 * are updated).
 *
 * Unlike the RW variant this macro is self-contained: the LOCK-prefixed
 * memory form raises the invalid-lock-prefix exception inline, and all
 * scopes are closed here (no tail macro required).  Memory operands are
 * mapped read-only (const pointers, IEM_MC_MEM_MAP_U*_RO).
 *
 * @param   a_fnNormalU16   Worker for 16-bit operands.
 * @param   a_fnNormalU32   Worker for 32-bit operands.
 * @param   a_fnNormalU64   Worker for 64-bit operands.
 */
#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t,         u16Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t,         u32Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t,         u64Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* LOCK is never valid here (destination not written). */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
627
628
/**
 * Body for instructions like ADD, AND, OR, ++ working on AL with a byte
 * immediate (no ModR/M byte; LOCK prefix rejected).
 *
 * Note: deliberately ends with IEM_MC_END() without a trailing semicolon so
 * the invocation site supplies it.
 *
 * @param   a_fnNormalU8    The worker, signature
 *                          (uint8_t *pu8Dst, uint8_t u8Src, uint32_t *pEFlags).
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    \
    IEM_MC_BEGIN(3, 0, 0, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1); \
    IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
    \
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
    \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
648
/**
 * Body for instructions like ADD, AND, OR, ++ working on AX/EAX/RAX with a
 * word/dword immediate (no ModR/M byte; LOCK prefix rejected).
 *
 * The 64-bit case reads a 32-bit immediate sign-extended to 64 bits
 * (IEM_OPCODE_GET_NEXT_S32_SX_U64), matching the Iz encoding.
 *
 * @param   a_fnNormalU16       Worker for 16-bit operands.
 * @param   a_fnNormalU32       Worker for 32-bit operands.
 * @param   a_fnNormalU64       Worker for 64-bit operands.
 * @param   a_fModifiesDstReg   Non-zero when the destination register is
 *                              written (ADD/OR/...); gates the clearing of
 *                              bits 63:32 of RAX in the 32-bit case.  Pass
 *                              zero for CMP/TEST style instructions.
 *
 * NOTE(review): unlike IEMOP_BODY_BINARY_rm_rv_RW the switch cases here have
 * no break; this relies on IEM_MC_ADVANCE_RIP_AND_FINISH/IEM_MC_END not
 * falling through — confirm against the IEM_MC_* definitions.
 */
#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            \
            IEM_MC_BEGIN(3, 0, 0, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
            \
            if (a_fModifiesDstReg) \
                IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
715
716
717
718/* Instruction specification format - work in progress: */
719
720/**
721 * @opcode 0x00
722 * @opmnemonic add
723 * @op1 rm:Eb
724 * @op2 reg:Gb
725 * @opmaps one
726 * @openc ModR/M
727 * @opflmodify cf,pf,af,zf,sf,of
728 * @ophints harmless ignores_op_sizes
729 * @opstats add_Eb_Gb
730 * @opgroup og_gen_arith_bin
731 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
732 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
733 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
734 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
735 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Paired macros: _RW leaves the LOCK-prefix scopes open, _LOCKED closes them. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_add_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_add_u8_locked);
}
742
743
744/**
745 * @opcode 0x01
746 * @opgroup og_gen_arith_bin
747 * @opflmodify cf,pf,af,zf,sf,of
748 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
749 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
750 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
751 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
752 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Paired macros: _RW leaves the LOCK-prefix scopes open, _LOCKED closes them. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
759
760
761/**
762 * @opcode 0x02
763 * @opgroup og_gen_arith_bin
764 * @opflmodify cf,pf,af,zf,sf,of
765 * @opcopytests iemOp_add_Eb_Gb
766 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register destination: self-contained body, no locked variant needed. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
}
772
773
774/**
775 * @opcode 0x03
776 * @opgroup og_gen_arith_bin
777 * @opflmodify cf,pf,af,zf,sf,of
778 * @opcopytests iemOp_add_Ev_Gv
779 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* Trailing args: presumably fModifiesDstReg=1, second flag 0 — see the
       IEMOP_BODY_BINARY_rv_rm definition (not in this chunk) to confirm. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1, 0);
}
785
786
787/**
788 * @opcode 0x04
789 * @opgroup og_gen_arith_bin
790 * @opflmodify cf,pf,af,zf,sf,of
791 * @opcopytests iemOp_add_Eb_Gb
792 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* AL += imm8; the body macro ends sans semicolon, supplied here. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
}
798
799
800/**
801 * @opcode 0x05
802 * @opgroup og_gen_arith_bin
803 * @opflmodify cf,pf,af,zf,sf,of
804 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
805 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
806 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
807 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
808 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* fModifiesDstReg=1: ADD writes rAX, so the 32-bit form clears bits 63:32. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
814
815
816/**
817 * @opcode 0x06
818 * @opgroup og_stack_sreg
819 */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* PUSH ES is invalid in 64-bit mode. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
826
827
828/**
829 * @opcode 0x07
830 * @opgroup og_stack_sreg
831 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* POP ES is invalid in 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to the C implementation.  The register mask names the guest
       state touched: xSP plus the ES selector/base/limit — presumably hints
       for the native recompiler; confirm against IEM_MC_DEFER_TO_CIMPL_2_RET. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES),
                                iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
844
845
846/**
847 * @opcode 0x08
848 * @opgroup og_gen_arith_bin
849 * @opflmodify cf,pf,af,zf,sf,of
850 * @opflundef af
851 * @opflclear of,cf
852 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
853 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
854 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
855 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
856 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF undefined after OR. */
    /* Paired macros: _RW leaves the LOCK-prefix scopes open, _LOCKED closes them. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_or_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_or_u8_locked);
}
864
865
/**
867 * @opcode 0x09
868 * @opgroup og_gen_arith_bin
869 * @opflmodify cf,pf,af,zf,sf,of
870 * @opflundef af
871 * @opflclear of,cf
872 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
873 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
874 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
875 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
876 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
877 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
878 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF undefined after OR. */
    /* Paired macros: _RW leaves the LOCK-prefix scopes open, _LOCKED closes them. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
886
887
888/**
889 * @opcode 0x0a
890 * @opgroup og_gen_arith_bin
891 * @opflmodify cf,pf,af,zf,sf,of
892 * @opflundef af
893 * @opflclear of,cf
894 * @opcopytests iemOp_or_Eb_Gb
895 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    /* NOTE(review): IEMOPHINT_LOCK_ALLOWED on a register-destination form looks
       inconsistent with iemOp_add_Gb_Eb (no lock hint) — confirm intent. */
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF undefined after OR. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
}
902
903
904/**
905 * @opcode 0x0b
906 * @opgroup og_gen_arith_bin
907 * @opflmodify cf,pf,af,zf,sf,of
908 * @opflundef af
909 * @opflclear of,cf
910 * @opcopytests iemOp_or_Ev_Gv
911 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF undefined after OR. */
    /* Trailing args mirror iemOp_add_Gv_Ev; see IEMOP_BODY_BINARY_rv_rm (not in this chunk). */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1, 0);
}
918
919
920/**
921 * @opcode 0x0c
922 * @opgroup og_gen_arith_bin
923 * @opflmodify cf,pf,af,zf,sf,of
924 * @opflundef af
925 * @opflclear of,cf
926 * @opcopytests iemOp_or_Eb_Gb
927 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF undefined after OR. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
}
934
935
936/**
937 * @opcode 0x0d
938 * @opgroup og_gen_arith_bin
939 * @opflmodify cf,pf,af,zf,sf,of
940 * @opflundef af
941 * @opflclear of,cf
942 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
943 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
944 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
945 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
946 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
947 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
948 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
949 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX,Iz (0x0d): OR an operand-size immediate into AX/EAX/RAX. */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* AF is architecturally undefined after OR; tell the verifier to ignore it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
956
957
958/**
959 * @opcode 0x0e
960 * @opgroup og_stack_sreg
961 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS (0x0e): invalid in 64-bit mode; otherwise handled by the common segment-register push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
968
969
970/**
971 * @opcode 0x0f
972 * @opmnemonic EscTwo0f
973 * @openc two0f
974 * @opdisenum OP_2B_ESC
975 * @ophints harmless
976 * @opgroup og_escapes
977 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    /* 286 and later: 0x0f escapes into the two-byte opcode map. */
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        /* The table has four entries per opcode, selected by the prefix index (idxPrefix). */
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_CS),
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
1015
1016/**
1017 * @opcode 0x10
1018 * @opgroup og_gen_arith_bin
1019 * @opfltest cf
1020 * @opflmodify cf,pf,af,zf,sf,of
1021 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1022 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1023 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1024 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1025 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1026 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC Eb,Gb (0x10): byte add-with-carry, r/m destination; the LOCK prefix is allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_adc_u8_locked);
}
1033
1034
1035/**
1036 * @opcode 0x11
1037 * @opgroup og_gen_arith_bin
1038 * @opfltest cf
1039 * @opflmodify cf,pf,af,zf,sf,of
1040 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1041 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1042 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1043 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1044 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1045 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC Ev,Gv (0x11): 16/32/64-bit add-with-carry, r/m destination; LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1052
1053
1054/**
1055 * @opcode 0x12
1056 * @opgroup og_gen_arith_bin
1057 * @opfltest cf
1058 * @opflmodify cf,pf,af,zf,sf,of
1059 * @opcopytests iemOp_adc_Eb_Gb
1060 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC Gb,Eb (0x12): byte add-with-carry with the register as destination. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
1066
1067
1068/**
1069 * @opcode 0x13
1070 * @opgroup og_gen_arith_bin
1071 * @opfltest cf
1072 * @opflmodify cf,pf,af,zf,sf,of
1073 * @opcopytests iemOp_adc_Ev_Gv
1074 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC Gv,Ev (0x13): 16/32/64-bit add-with-carry with the register as destination. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1, 0);
}
1080
1081
1082/**
1083 * @opcode 0x14
1084 * @opgroup og_gen_arith_bin
1085 * @opfltest cf
1086 * @opflmodify cf,pf,af,zf,sf,of
1087 * @opcopytests iemOp_adc_Eb_Gb
1088 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL,Ib (0x14): add-with-carry an immediate byte into AL. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
1094
1095
1096/**
1097 * @opcode 0x15
1098 * @opgroup og_gen_arith_bin
1099 * @opfltest cf
1100 * @opflmodify cf,pf,af,zf,sf,of
1101 * @opcopytests iemOp_adc_Ev_Gv
1102 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX,Iz (0x15): add-with-carry an operand-size immediate into AX/EAX/RAX. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1108
1109
1110/**
1111 * @opcode 0x16
1112 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS (0x16): invalid in 64-bit mode; otherwise handled by the common segment-register push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1119
1120
1121/**
1122 * @opcode 0x17
1123 * @opgroup og_gen_arith_bin
1124 * @opfltest cf
1125 * @opflmodify cf,pf,af,zf,sf,of
1126 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS (0x17): invalid in 64-bit mode.  Defers to iemCImpl_pop_Sreg; IEM_CIMPL_F_INHIBIT_SHADOW
       matches DISOPTYPE_INHIBIT_IRQS (interrupt inhibition after loading SS).  The dirty mask covers
       rSP plus the SS selector, base and limit. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_SS),
                                iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1139
1140
1141/**
1142 * @opcode 0x18
1143 * @opgroup og_gen_arith_bin
1144 * @opfltest cf
1145 * @opflmodify cf,pf,af,zf,sf,of
1146 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB Eb,Gb (0x18): byte subtract-with-borrow, r/m destination; LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sbb_u8_locked);
}
1153
1154
1155/**
1156 * @opcode 0x19
1157 * @opgroup og_gen_arith_bin
1158 * @opfltest cf
1159 * @opflmodify cf,pf,af,zf,sf,of
1160 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB Ev,Gv (0x19): 16/32/64-bit subtract-with-borrow, r/m destination; LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1167
1168
1169/**
1170 * @opcode 0x1a
1171 * @opgroup og_gen_arith_bin
1172 * @opfltest cf
1173 * @opflmodify cf,pf,af,zf,sf,of
1174 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB Gb,Eb (0x1a): byte subtract-with-borrow with the register as destination. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
1180
1181
1182/**
1183 * @opcode 0x1b
1184 * @opgroup og_gen_arith_bin
1185 * @opfltest cf
1186 * @opflmodify cf,pf,af,zf,sf,of
1187 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB Gv,Ev (0x1b): 16/32/64-bit subtract-with-borrow with the register as destination. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1, 0);
}
1193
1194
1195/**
1196 * @opcode 0x1c
1197 * @opgroup og_gen_arith_bin
1198 * @opfltest cf
1199 * @opflmodify cf,pf,af,zf,sf,of
1200 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL,Ib (0x1c): subtract-with-borrow an immediate byte from AL. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
1206
1207
1208/**
1209 * @opcode 0x1d
1210 * @opgroup og_gen_arith_bin
1211 * @opfltest cf
1212 * @opflmodify cf,pf,af,zf,sf,of
1213 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX,Iz (0x1d): subtract-with-borrow an operand-size immediate from AX/EAX/RAX. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1219
1220
1221/**
1222 * @opcode 0x1e
1223 * @opgroup og_stack_sreg
1224 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS (0x1e): invalid in 64-bit mode; otherwise handled by the common segment-register push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1231
1232
1233/**
1234 * @opcode 0x1f
1235 * @opgroup og_stack_sreg
1236 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS (0x1f): invalid in 64-bit mode.  Defers to iemCImpl_pop_Sreg; the dirty mask covers
       rSP plus the DS selector, base and limit. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS),
                                iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1249
1250
1251/**
1252 * @opcode 0x20
1253 * @opgroup og_gen_arith_bin
1254 * @opflmodify cf,pf,af,zf,sf,of
1255 * @opflundef af
1256 * @opflclear of,cf
1257 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND Eb,Gb (0x20): byte AND, r/m destination; LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* AF is architecturally undefined after AND; tell the verifier to ignore it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_and_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_and_u8_locked);
}
1265
1266
1267/**
1268 * @opcode 0x21
1269 * @opgroup og_gen_arith_bin
1270 * @opflmodify cf,pf,af,zf,sf,of
1271 * @opflundef af
1272 * @opflclear of,cf
1273 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND Ev,Gv (0x21): 16/32/64-bit AND, r/m destination; LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* AF is architecturally undefined after AND; tell the verifier to ignore it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1281
1282
1283/**
1284 * @opcode 0x22
1285 * @opgroup og_gen_arith_bin
1286 * @opflmodify cf,pf,af,zf,sf,of
1287 * @opflundef af
1288 * @opflclear of,cf
1289 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND Gb,Eb (0x22): byte AND with the register as destination. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* AF is architecturally undefined after AND; tell the verifier to ignore it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1296
1297
1298/**
1299 * @opcode 0x23
1300 * @opgroup og_gen_arith_bin
1301 * @opflmodify cf,pf,af,zf,sf,of
1302 * @opflundef af
1303 * @opflclear of,cf
1304 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND Gv,Ev (0x23): 16/32/64-bit AND with the register as destination. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* AF is architecturally undefined after AND; tell the verifier to ignore it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1, 0);
}
1311
1312
1313/**
1314 * @opcode 0x24
1315 * @opgroup og_gen_arith_bin
1316 * @opflmodify cf,pf,af,zf,sf,of
1317 * @opflundef af
1318 * @opflclear of,cf
1319 */
1320FNIEMOP_DEF(iemOp_and_Al_Ib)
1321{
1322 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1323 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1324 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1325}
1326
1327
1328/**
1329 * @opcode 0x25
1330 * @opgroup og_gen_arith_bin
1331 * @opflmodify cf,pf,af,zf,sf,of
1332 * @opflundef af
1333 * @opflclear of,cf
1334 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX,Iz (0x25): AND an operand-size immediate into AX/EAX/RAX. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* AF is architecturally undefined after AND; tell the verifier to ignore it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1341
1342
1343/**
1344 * @opcode 0x26
1345 * @opmnemonic SEG
1346 * @op1 ES
1347 * @opgroup og_prefix
1348 * @openc prefix
1349 * @opdisenum OP_SEG
1350 * @ophints harmless
1351 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix (0x26): record it and decode the following opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg    = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1361
1362
1363/**
1364 * @opcode 0x27
1365 * @opfltest af,cf
1366 * @opflmodify cf,pf,af,zf,sf,of
1367 * @opflundef of
1368 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA (0x27): decimal adjust AL after addition; invalid in 64-bit mode.  Deferred to iemCImpl_daa. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* OF is architecturally undefined after DAA (see @opflundef above). */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_daa);
}
1377
1378
1379/**
1380 * @opcode 0x28
1381 * @opgroup og_gen_arith_bin
1382 * @opflmodify cf,pf,af,zf,sf,of
1383 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB Eb,Gb (0x28): byte subtract, r/m destination; LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sub_u8_locked);
}
1390
1391
1392/**
1393 * @opcode 0x29
1394 * @opgroup og_gen_arith_bin
1395 * @opflmodify cf,pf,af,zf,sf,of
1396 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB Ev,Gv (0x29): 16/32/64-bit subtract, r/m destination; LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1403
1404
1405/**
1406 * @opcode 0x2a
1407 * @opgroup og_gen_arith_bin
1408 * @opflmodify cf,pf,af,zf,sf,of
1409 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB Gb,Eb (0x2a): byte subtract with the register as destination. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1415
1416
1417/**
1418 * @opcode 0x2b
1419 * @opgroup og_gen_arith_bin
1420 * @opflmodify cf,pf,af,zf,sf,of
1421 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB Gv,Ev (0x2b): 16/32/64-bit subtract with the register as destination. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1, 0);
}
1427
1428
1429/**
1430 * @opcode 0x2c
1431 * @opgroup og_gen_arith_bin
1432 * @opflmodify cf,pf,af,zf,sf,of
1433 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL,Ib (0x2c): subtract an immediate byte from AL. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1439
1440
1441/**
1442 * @opcode 0x2d
1443 * @opgroup og_gen_arith_bin
1444 * @opflmodify cf,pf,af,zf,sf,of
1445 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX,Iz (0x2d): subtract an operand-size immediate from AX/EAX/RAX. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1451
1452
1453/**
1454 * @opcode 0x2e
1455 * @opmnemonic SEG
1456 * @op1 CS
1457 * @opgroup og_prefix
1458 * @openc prefix
1459 * @opdisenum OP_SEG
1460 * @ophints harmless
1461 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix (0x2e): record it and decode the following opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1471
1472
1473/**
1474 * @opcode 0x2f
1475 * @opfltest af,cf
1476 * @opflmodify cf,pf,af,zf,sf,of
1477 * @opflundef of
1478 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS (0x2f): decimal adjust AL after subtraction; invalid in 64-bit mode.  Deferred to iemCImpl_das. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* OF is architecturally undefined after DAS (see @opflundef above). */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_das);
}
1487
1488
1489/**
1490 * @opcode 0x30
1491 * @opgroup og_gen_arith_bin
1492 * @opflmodify cf,pf,af,zf,sf,of
1493 * @opflundef af
1494 * @opflclear of,cf
1495 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR Eb,Gb (0x30): byte XOR, r/m destination; LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* AF is architecturally undefined after XOR; tell the verifier to ignore it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_xor_u8_locked);
}
1503
1504
1505/**
1506 * @opcode 0x31
1507 * @opgroup og_gen_arith_bin
1508 * @opflmodify cf,pf,af,zf,sf,of
1509 * @opflundef af
1510 * @opflclear of,cf
1511 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR Ev,Gv (0x31): 16/32/64-bit XOR, r/m destination; LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* AF is architecturally undefined after XOR; tell the verifier to ignore it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1519
1520
1521/**
1522 * @opcode 0x32
1523 * @opgroup og_gen_arith_bin
1524 * @opflmodify cf,pf,af,zf,sf,of
1525 * @opflundef af
1526 * @opflclear of,cf
1527 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR Gb,Eb (0x32): byte XOR with the register as destination. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* AF is architecturally undefined after XOR; tell the verifier to ignore it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1534
1535
1536/**
1537 * @opcode 0x33
1538 * @opgroup og_gen_arith_bin
1539 * @opflmodify cf,pf,af,zf,sf,of
1540 * @opflundef af
1541 * @opflclear of,cf
1542 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR Gv,Ev (0x33): 16/32/64-bit XOR with the register as destination. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* AF is architecturally undefined after XOR; tell the verifier to ignore it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1, 0);
}
1549
1550
1551/**
1552 * @opcode 0x34
1553 * @opgroup og_gen_arith_bin
1554 * @opflmodify cf,pf,af,zf,sf,of
1555 * @opflundef af
1556 * @opflclear of,cf
1557 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL,Ib (0x34): byte XOR of an immediate into AL. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* AF is architecturally undefined after XOR; tell the verifier to ignore it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1564
1565
1566/**
1567 * @opcode 0x35
1568 * @opgroup og_gen_arith_bin
1569 * @opflmodify cf,pf,af,zf,sf,of
1570 * @opflundef af
1571 * @opflclear of,cf
1572 */
1573FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1574{
1575 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1576 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1577 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1578}
1579
1580
1581/**
1582 * @opcode 0x36
1583 * @opmnemonic SEG
1584 * @op1 SS
1585 * @opgroup og_prefix
1586 * @openc prefix
1587 * @opdisenum OP_SEG
1588 * @ophints harmless
1589 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix (0x36): record it and decode the following opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1599
1600
1601/**
1602 * @opcode 0x37
1603 * @opfltest af,cf
1604 * @opflmodify cf,pf,af,zf,sf,of
1605 * @opflundef pf,zf,sf,of
1606 * @opgroup og_gen_arith_dec
1607 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1608 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1609 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1610 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1611 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1612 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1613 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1614 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1615 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1616 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1617 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1618 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1619 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1620 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1621 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1622 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1623 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1624 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1625 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1626 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1627 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1628 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1629 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1630 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1631 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1632 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1633 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1634 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1635 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1636 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1637 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1638 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA (0x37): ASCII adjust AL after addition; invalid in 64-bit mode.  Deferred to iemCImpl_aaa. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* OF is among the flags left architecturally undefined by AAA (see @opflundef above). */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aaa);
}
1648
1649
1650/**
1651 * @opcode 0x38
1652 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP Eb,Gb (0x38): byte compare; read-only on the r/m operand, so no LOCK variant. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
1659
1660
1661/**
1662 * @opcode 0x39
1663 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP Ev,Gv (0x39): 16/32/64-bit compare; read-only on the r/m operand. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
1669
1670
1671/**
1672 * @opcode 0x3a
1673 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP Gb,Eb (0x3a): byte compare, register first operand. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1679
1680
1681/**
1682 * @opcode 0x3b
1683 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP Gv,Ev (0x3b): 16/32/64-bit compare, register first operand (no result write-back: 5th arg 0). */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0, 0);
}
1689
1690
1691/**
1692 * @opcode 0x3c
1693 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL,Ib (0x3c): compare AL against an immediate byte. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1699
1700
1701/**
1702 * @opcode 0x3d
1703 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX,Iz (0x3d): compare AX/EAX/RAX against an operand-size immediate (no write-back: last arg 0). */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1709
1710
1711/**
1712 * @opcode 0x3e
1713 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix (0x3e): record it and decode the following opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1723
1724
1725/**
1726 * @opcode 0x3f
1727 * @opfltest af,cf
1728 * @opflmodify cf,pf,af,zf,sf,of
1729 * @opflundef pf,zf,sf,of
1730 * @opgroup og_gen_arith_dec
1731 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1732 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1733 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1734 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1735 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1736 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1737 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1738 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1739 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1740 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1741 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1742 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1743 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1744 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1745 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1746 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1747 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1748 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1749 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1750 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1751 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1752 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1753 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1754 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1755 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1756 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1757 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1758 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1759 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1760 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1761 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1762 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1763 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1764 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1765 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1766 */
1767FNIEMOP_DEF(iemOp_aas)
1768{
1769 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1770 IEMOP_HLP_NO_64BIT();
1771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1772 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1773
1774 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aas);
1775}
1776
1777
1778/**
1779 * Common 'inc/dec register' helper.
1780 *
1781 * Not for 64-bit code, only for what became the rex prefixes.
1782 */
1783#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
1784 switch (pVCpu->iem.s.enmEffOpSize) \
1785 { \
1786 case IEMMODE_16BIT: \
1787 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_64BIT, 0); \
1788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1789 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
1790 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1791 IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
1792 IEM_MC_REF_EFLAGS(pEFlags); \
1793 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
1794 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1795 IEM_MC_END(); \
1796 break; \
1797 \
1798 case IEMMODE_32BIT: \
1799 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
1800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1801 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
1802 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1803 IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
1804 IEM_MC_REF_EFLAGS(pEFlags); \
1805 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
1806 IEM_MC_CLEAR_HIGH_GREG_U64(a_iReg); \
1807 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1808 IEM_MC_END(); \
1809 break; \
1810 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1811 } \
1812 (void)0
1813
1814/**
1815 * @opcode 0x40
1816 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        /* Restart decoding with the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Otherwise (16/32-bit modes) 0x40 is INC eAX. */
    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1834
1835
1836/**
1837 * @opcode 0x41
1838 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        /* NOTE(review): stored pre-shifted to bit 3 — presumably OR'd directly into
           register numbers by the decoder; confirm against uRexB consumers. */
        pVCpu->iem.s.uRexB     = 1 << 3;

        /* Restart decoding with the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Otherwise (16/32-bit modes) 0x41 is INC eCX. */
    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1857
1858
1859/**
1860 * @opcode 0x42
1861 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* pre-shifted to bit 3 */

        /* Restart decoding with the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Otherwise (16/32-bit modes) 0x42 is INC eDX. */
    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1880
1881
1882
1883/**
1884 * @opcode 0x43
1885 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3; /* pre-shifted to bit 3 */
        pVCpu->iem.s.uRexIndex = 1 << 3;

        /* Restart decoding with the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Otherwise (16/32-bit modes) 0x43 is INC eBX. */
    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1905
1906
1907/**
1908 * @opcode 0x44
1909 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* pre-shifted to bit 3 */

        /* Restart decoding with the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Otherwise (16/32-bit modes) 0x44 is INC eSP. */
    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1928
1929
1930/**
1931 * @opcode 0x45
1932 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* pre-shifted to bit 3 */
        pVCpu->iem.s.uRexB     = 1 << 3;

        /* Restart decoding with the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Otherwise (16/32-bit modes) 0x45 is INC eBP. */
    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1952
1953
1954/**
1955 * @opcode 0x46
1956 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RX: extend both the ModRM reg field and the SIB index register numbers. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        /* The prefix consumed this byte; fetch and dispatch the actual opcode. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: INC eSI. */
    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
1976
1977
1978/**
1979 * @opcode 0x47
1980 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RBX: extend the ModRM reg field, the r/m-base, and the SIB index register numbers. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        /* The prefix consumed this byte; fetch and dispatch the actual opcode. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: INC eDI. */
    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
2001
2002
2003/**
2004 * @opcode 0x48
2005 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.W: 64-bit operand size; re-derive the effective operand size from the
           accumulated prefixes. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu);

        /* The prefix consumed this byte; fetch and dispatch the actual opcode. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: DEC eAX. */
    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
}
2024
2025
2026/**
2027 * @opcode 0x49
2028 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.BW: extend the r/m-base register number and select 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* The prefix consumed this byte; fetch and dispatch the actual opcode. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: DEC eCX. */
    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
}
2048
2049
2050/**
2051 * @opcode 0x4a
2052 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.XW: extend the SIB index register number and select 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* The prefix consumed this byte; fetch and dispatch the actual opcode. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: DEC eDX. */
    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
}
2072
2073
2074/**
2075 * @opcode 0x4b
2076 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.BXW: extend the r/m-base and SIB index register numbers and select
           64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* The prefix consumed this byte; fetch and dispatch the actual opcode. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: DEC eBX. */
    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
}
2097
2098
2099/**
2100 * @opcode 0x4c
2101 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RW: extend the ModRM reg field register number and select 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* The prefix consumed this byte; fetch and dispatch the actual opcode. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: DEC eSP. */
    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
}
2121
2122
2123/**
2124 * @opcode 0x4d
2125 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RBW: extend the ModRM reg field and r/m-base register numbers and
           select 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB   = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* The prefix consumed this byte; fetch and dispatch the actual opcode. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: DEC eBP. */
    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
}
2146
2147
2148/**
2149 * @opcode 0x4e
2150 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RXW: extend the ModRM reg field and SIB index register numbers and
           select 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* The prefix consumed this byte; fetch and dispatch the actual opcode. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: DEC eSI. */
    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
}
2171
2172
2173/**
2174 * @opcode 0x4f
2175 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RBXW: all three register-extension bits plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* The prefix consumed this byte; fetch and dispatch the actual opcode. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: DEC eDI. */
    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
}
2197
2198
/**
 * Common worker for the 'push register' instructions (opcodes 0x50..0x57).
 *
 * @param   iReg    The general register index (X86_GREG_XXX); extended with
 *                  REX.B (bit 3) in 64-bit mode.
 *
 * In 64-bit mode the default operand size is forced to 64 bits; a 66h
 * operand-size prefix selects a 16-bit push instead (there is no 32-bit
 * push in 64-bit mode).
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            /* 32-bit pushes only exist outside 64-bit mode (386+). */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2246
2247
2248/**
2249 * @opcode 0x50
2250 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    /* PUSH rAX - the common worker applies REX.B and picks the operand size. */
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
2256
2257
2258/**
2259 * @opcode 0x51
2260 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    /* PUSH rCX - the common worker applies REX.B and picks the operand size. */
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
2266
2267
2268/**
2269 * @opcode 0x52
2270 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    /* PUSH rDX - the common worker applies REX.B and picks the operand size. */
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
2276
2277
2278/**
2279 * @opcode 0x53
2280 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    /* PUSH rBX - the common worker applies REX.B and picks the operand size. */
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
2286
2287
2288/**
2289 * @opcode 0x54
2290 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        /* 8086 quirk: PUSH SP stores the value of SP *after* the decrement,
           i.e. the original SP minus 2; 286 and later push the original SP.
           NOTE(review): assumes the MC block completes the instruction for the
           8086 target so the common-worker call below only runs for other
           targets - confirm against the IEM_MC_END semantics. */
        IEM_MC_BEGIN(0, 1, IEM_MC_F_ONLY_8086, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
2307
2308
2309/**
2310 * @opcode 0x55
2311 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    /* PUSH rBP - the common worker applies REX.B and picks the operand size. */
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
2317
2318
2319/**
2320 * @opcode 0x56
2321 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    /* PUSH rSI - the common worker applies REX.B and picks the operand size. */
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
2327
2328
2329/**
2330 * @opcode 0x57
2331 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    /* PUSH rDI - the common worker applies REX.B and picks the operand size. */
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
2337
2338
/**
 * Common worker for the 'pop register' instructions (opcodes 0x58..0x5f).
 *
 * @param   iReg    The general register index (X86_GREG_XXX); extended with
 *                  REX.B (bit 3) in 64-bit mode.
 *
 * In 64-bit mode the default operand size is forced to 64 bits; a 66h
 * operand-size prefix selects a 16-bit pop instead.  POP rSP itself has
 * special semantics and is handled by the 0x5c decoder, not here (except
 * for the REX.B = r12 case).
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64(iReg); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2387
2388
2389/**
2390 * @opcode 0x58
2391 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    /* POP rAX - the common worker applies REX.B and picks the operand size. */
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
2397
2398
2399/**
2400 * @opcode 0x59
2401 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    /* POP rCX - the common worker applies REX.B and picks the operand size. */
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
2407
2408
2409/**
2410 * @opcode 0x5a
2411 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    /* POP rDX - the common worker applies REX.B and picks the operand size. */
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
2417
2418
2419/**
2420 * @opcode 0x5b
2421 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    /* POP rBX - the common worker applies REX.B and picks the operand size. */
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
2427
2428
2429/**
2430 * @opcode 0x5c
2431 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* With REX.B this is POP r12, which has no special stack-pointer
           semantics - the common worker handles it. */
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* POP rSP proper: pop into a local first, then store it to rSP, so the
       popped value is not clobbered by the stack-pointer increment. */
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_LOCAL(pu16Dst, u16Dst);
            IEM_MC_POP_U16(pu16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_LOCAL(pu32Dst, u32Dst);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_LOCAL(pu64Dst, u64Dst);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2488
2489
2490/**
2491 * @opcode 0x5d
2492 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    /* POP rBP - the common worker applies REX.B and picks the operand size. */
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
2498
2499
2500/**
2501 * @opcode 0x5e
2502 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    /* POP rSI - the common worker applies REX.B and picks the operand size. */
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
2508
2509
2510/**
2511 * @opcode 0x5f
2512 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    /* POP rDI - the common worker applies REX.B and picks the operand size. */
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
2518
2519
2520/**
2521 * @opcode 0x60
2522 */
FNIEMOP_DEF(iemOp_pusha)
{
    /* PUSHA/PUSHAD: push all eight general registers; 186+, invalid in 64-bit
       mode.  Deferred to a C implementation; only xSP is modified by the
       instruction itself. */
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_32);
}
2533
2534
2535/**
2536 * @opcode 0x61
2537 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    /* Outside 64-bit mode this is POPA/POPAD (186+); in 64-bit mode the byte
       is the MVEX prefix (Knights Corner), which is not supported. */
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        /* Deferred to a C implementation; all eight GPRs are (potentially)
           written, hence the full register mask. */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                          RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                        iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                      RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                    iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
2572
2573
2574/**
2575 * @opcode 0x62
2576 * @opmnemonic bound
2577 * @op1 Gv_RO
2578 * @op2 Ma
2579 * @opmincpu 80186
2580 * @ophints harmless x86_invalid_64
2581 * @optest op1=0 op2=0 ->
2582 * @optest op1=1 op2=0 -> value.xcpt=5
2583 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2584 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2585 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2586 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2587 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2588 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2589 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2590 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2591 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2592 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2593 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2594 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2595 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2596 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2597 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2598 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2599 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2600 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2601 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2602 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2603 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2604 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2605 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2606 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2607 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2608 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2609 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2610 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2611 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2612 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2613 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2614 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2615 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2616 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2617 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2618 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2619 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2620 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2621 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2622 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2623 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2624 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2625 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /** @todo testcase: check that there are two memory accesses involved.  Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                /* 16-bit operands: index in Gv, two 16-bit bounds read from memory. */
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint16_t, u16Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint32_t, u32Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        /* MOD=3 outside 64-bit mode: only valid as an EVEX prefix, and only if
           the guest CPU supports AVX-512. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX prefix: consume the two remaining payload bytes, then bail - not implemented. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2713
2714
2715/** Opcode 0x63 - non-64-bit modes. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    /* ARPL Ew,Gw: adjust the RPL field of the destination selector to be at
       least that of the source; 286+, protected mode only (opcode 0x63 means
       MOVSXD in 64-bit mode and is dispatched elsewhere). */
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *,      pu16Dst, 0);
        IEM_MC_ARG(uint16_t,        u16Src,  1);
        IEM_MC_ARG(uint32_t *,      pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory */
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst,          0);
        IEM_MC_ARG(uint16_t,   u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,  bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        /* Map the destination word read/write so the worker can update it in place. */
        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2763
2764
2765/**
2766 * @opcode 0x63
2767 *
2768 * @note This is a weird one. It works like a regular move instruction if
2769 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2770 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the REX.W form (sign-extend 32-bit source to 64-bit destination)
       is implemented; the no-REX.W "regular move" variant asserts below. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register.
             */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory.
             */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
2812
2813
2814/**
2815 * @opcode 0x64
2816 * @opmnemonic segfs
2817 * @opmincpu 80386
2818 * @opgroup og_prefixes
2819 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    /* FS segment-override prefix: record it, make FS the effective segment,
       and continue decoding the next byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2831
2832
2833/**
2834 * @opcode 0x65
2835 * @opmnemonic seggs
2836 * @opmincpu 80386
2837 * @opgroup og_prefixes
2838 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    /* GS segment-override prefix: record it, make GS the effective segment,
       and continue decoding the next byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2850
2851
2852/**
2853 * @opcode 0x66
2854 * @opmnemonic opsize
2855 * @openc prefix
2856 * @opmincpu 80386
2857 * @ophints harmless
2858 * @opgroup og_prefixes
2859 */
FNIEMOP_DEF(iemOp_op_size)
{
    /* Operand-size override prefix (66h): flip the effective operand size and
       continue decoding the next byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2876
2877
2878/**
2879 * @opcode 0x67
2880 * @opmnemonic addrsize
2881 * @openc prefix
2882 * @opmincpu 80386
2883 * @ophints harmless
2884 * @opgroup og_prefixes
2885 */
FNIEMOP_DEF(iemOp_addr_size)
{
    /* Address-size override prefix (67h): toggle the effective address mode
       relative to the default (64-bit mode drops to 32-bit, never to 16-bit)
       and continue decoding the next byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2903
2904
2905/**
2906 * @opcode 0x68
2907 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    /* PUSH Iz: push an immediate of the effective operand size; 186+.
       In 64-bit mode the 32-bit immediate is sign-extended to 64 bits. */
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint16_t, u16Value, u16Imm);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint32_t, u32Value, u32Imm);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint64_t, u64Value, u64Imm);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2948
2949
/**
 * @opcode 0x69
 *
 * Three-operand signed multiply with a full operand-size immediate:
 * Gv = Ev * Iz.  OF/CF are defined by the worker; SF, ZF, AF and PF are
 * left undefined by the hardware (see IEMOP_VERIFICATION_UNDEFINED_EFLAGS).
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            /* Select the EFLAGS-behavior variant of the two-operand imul worker. */
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                /* Multiply into a local copy of Ev, then store the result in Gv. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 immediate bytes follow the ModRM bytes. */

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 immediate bytes follow the ModRM bytes. */

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand - the 32-bit immediate is sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 immediate bytes follow the ModRM bytes. */

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();          /* parameter count for the threaded function for this block. */

                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                /* Sign-extend the 32-bit immediate here instead (see note above). */
                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int32_t)u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3112
3113
/**
 * @opcode 0x6a
 *
 * Push a sign-extended byte immediate.  The byte is widened to the effective
 * operand size before being pushed.
 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_186, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint16_t, uValue, (int16_t)i8Imm); /* sign-extend to 16 bits */
            IEM_MC_PUSH_U16(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint32_t, uValue, (int32_t)i8Imm); /* sign-extend to 32 bits */
            IEM_MC_PUSH_U32(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint64_t, uValue, (int64_t)i8Imm); /* sign-extend to 64 bits */
            IEM_MC_PUSH_U64(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3153
3154
/**
 * @opcode 0x6b
 *
 * Three-operand signed multiply with a sign-extended byte immediate:
 * Gv = Ev * Ib.  SF, ZF, AF and PF are left undefined by the hardware.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            /* Select the EFLAGS-behavior variant of the two-operand imul worker. */
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extended byte immediate */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                /* Multiply into a local copy of Ev, then store the result in Gv. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);

                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 immediate byte follows the ModRM bytes. */

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extended byte immediate */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 immediate byte follows the ModRM bytes. */

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1); /* sign-extended byte immediate */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 immediate byte follows the ModRM bytes. */

                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the threaded parameter count. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                /* Sign-extend the byte immediate here instead (see note above). */
                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3318
3319
/**
 * @opcode 0x6c
 *
 * Byte string input from port DX.  Defers to a C implementation worker
 * selected by the effective address mode; the REP-prefixed form uses the
 * repeating workers and additionally clobbers xCX (see the register masks).
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3371
3372
/**
 * @opcode 0x6d
 *
 * Word/dword string input from port DX.  Dispatches on operand size and
 * address mode to the matching C implementation worker.  A 64-bit operand
 * size is handled by the op32 workers (shared case labels below); REP forms
 * additionally clobber xCX.
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the op32 workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the op32 workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3477
3478
/**
 * @opcode 0x6e
 *
 * Byte string output to port DX.  Defers to a C implementation worker
 * selected by the effective address mode; the effective segment is passed
 * along since outs reads from DS:xSI by default (segment-overridable).
 * REP forms additionally clobber xCX.
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3530
3531
/**
 * @opcode 0x6f
 *
 * Word/dword string output to port DX.  Dispatches on operand size and
 * address mode to the matching C implementation worker, passing the
 * effective segment along.  A 64-bit operand size is handled by the op32
 * workers (shared case labels below); REP forms additionally clobber xCX.
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the op32 workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the op32 workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3636
3637
/**
 * @opcode 0x70
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    /* Jump short if overflow: taken when OF is set. */
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3656
3657
/**
 * @opcode 0x71
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    /* Jump short if not overflow: arms are inverted, fall through when OF is set. */
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3676
/**
 * @opcode 0x72
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    /* Jump short if carry/below: taken when CF is set. */
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3695
3696
/**
 * @opcode 0x73
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    /* Jump short if not carry/not below: arms are inverted, fall through when CF is set. */
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3715
3716
/**
 * @opcode 0x74
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    /* Jump short if equal/zero: taken when ZF is set. */
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3735
3736
/**
 * @opcode 0x75
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    /* Jump short if not equal/not zero: arms are inverted, fall through when ZF is set. */
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3755
3756
/**
 * @opcode 0x76
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    /* Jump short if below or equal: taken when CF or ZF is set. */
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3775
3776
/**
 * @opcode 0x77
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    /* Jump short if above: arms are inverted, fall through when CF or ZF is set. */
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3795
3796
/**
 * @opcode 0x78
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    /* Jump short if sign: taken when SF is set. */
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3815
3816
/**
 * @opcode 0x79
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    /* Jump short if not sign: arms are inverted, fall through when SF is set. */
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3835
3836
/**
 * @opcode 0x7a
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    /* Jump short if parity (even): taken when PF is set. */
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3855
3856
/**
 * @opcode 0x7b
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    /* Jump short if not parity: arms are inverted, fall through when PF is set. */
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3875
3876
/**
 * @opcode 0x7c
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    /* Jump short if less (signed): taken when SF != OF. */
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3895
3896
/**
 * @opcode 0x7d
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    /* Jump short if greater or equal (signed): arms are inverted, fall through when SF != OF. */
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3915
3916
/**
 * @opcode 0x7e
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    /* Jump short if less or equal (signed): taken when ZF is set or SF != OF. */
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3935
3936
/**
 * @opcode 0x7f
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    /* Jump short if greater (signed): arms are inverted, fall through when ZF is set or SF != OF. */
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3955
3956
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Eb_Ib_80.
 *
 * Handles the register target and the unlocked memory target.  Note that the
 * expansion deliberately ends inside an open 'else' branch (see the trailing
 * '(void)0'); the invocation must be completed by a following
 * IEMOP_BODY_BINARY_Eb_Ib_LOCKED expansion that supplies the locked memory
 * path and closes the braces.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
4007
/**
 * LOCK-prefixed memory-target tail for IEMOP_BODY_BINARY_Eb_Ib_RW; closes
 * the open 'else' branch of that macro using the atomic (locked) worker.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
4031
/**
 * Read-only variant of IEMOP_BODY_BINARY_Eb_Ib_RW, for CMP (destination is
 * only read, memory is mapped RO).  Like the RW variant it ends inside an
 * open 'else' which IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK must close.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
4078
/**
 * Tail that closes IEMOP_BODY_BINARY_Eb_Ib_RO for instructions which do not
 * permit a LOCK prefix: raises \#UD when LOCK was decoded.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4085
4086
4087
4088/**
4089 * @opmaps grp1_80,grp1_83
4090 * @opcode /0
4091 */
4092FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
4093{
4094 IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
4095 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_add_u8);
4096 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
4097}
4098
4099
4100/**
4101 * @opmaps grp1_80,grp1_83
4102 * @opcode /1
4103 */
4104FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
4105{
4106 IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
4107 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_or_u8);
4108 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
4109}
4110
4111
4112/**
4113 * @opmaps grp1_80,grp1_83
4114 * @opcode /2
4115 */
4116FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
4117{
4118 IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
4119 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_adc_u8);
4120 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
4121}
4122
4123
4124/**
4125 * @opmaps grp1_80,grp1_83
4126 * @opcode /3
4127 */
4128FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
4129{
4130 IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
4131 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sbb_u8);
4132 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
4133}
4134
4135
4136/**
4137 * @opmaps grp1_80,grp1_83
4138 * @opcode /4
4139 */
4140FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
4141{
4142 IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
4143 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_and_u8);
4144 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
4145}
4146
4147
4148/**
4149 * @opmaps grp1_80,grp1_83
4150 * @opcode /5
4151 */
4152FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
4153{
4154 IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
4155 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sub_u8);
4156 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
4157}
4158
4159
4160/**
4161 * @opmaps grp1_80,grp1_83
4162 * @opcode /6
4163 */
4164FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
4165{
4166 IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
4167 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_xor_u8);
4168 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
4169}
4170
4171
4172/**
4173 * @opmaps grp1_80,grp1_83
4174 * @opcode /7
4175 */
4176FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
4177{
4178 IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
4179 IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
4180 IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
4181}
4182
4183
4184/**
4185 * @opcode 0x80
4186 */
4187FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
4188{
4189 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4190 switch (IEM_GET_MODRM_REG_8(bRm))
4191 {
4192 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
4193 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib, bRm);
4194 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
4195 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
4196 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
4197 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
4198 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
4199 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
4200 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4201 }
4202}
4203
4204
4205/**
4206 * Body for a group 1 binary operator.
4207 */
4208#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4209 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4210 { \
4211 /* register target */ \
4212 switch (pVCpu->iem.s.enmEffOpSize) \
4213 { \
4214 case IEMMODE_16BIT: \
4215 { \
4216 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4217 IEM_MC_BEGIN(3, 0, 0, 0); \
4218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4219 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4220 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4221 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4222 \
4223 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4224 IEM_MC_REF_EFLAGS(pEFlags); \
4225 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4226 \
4227 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4228 IEM_MC_END(); \
4229 break; \
4230 } \
4231 \
4232 case IEMMODE_32BIT: \
4233 { \
4234 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4235 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4237 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4238 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4239 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4240 \
4241 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4242 IEM_MC_REF_EFLAGS(pEFlags); \
4243 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4244 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4245 \
4246 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4247 IEM_MC_END(); \
4248 break; \
4249 } \
4250 \
4251 case IEMMODE_64BIT: \
4252 { \
4253 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4254 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4256 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4257 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4258 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4259 \
4260 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4261 IEM_MC_REF_EFLAGS(pEFlags); \
4262 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4263 \
4264 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4265 IEM_MC_END(); \
4266 break; \
4267 } \
4268 \
4269 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4270 } \
4271 } \
4272 else \
4273 { \
4274 /* memory target */ \
4275 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4276 { \
4277 switch (pVCpu->iem.s.enmEffOpSize) \
4278 { \
4279 case IEMMODE_16BIT: \
4280 { \
4281 IEM_MC_BEGIN(3, 3, 0, 0); \
4282 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4283 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4284 \
4285 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4286 IEMOP_HLP_DONE_DECODING(); \
4287 \
4288 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4289 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4290 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4291 \
4292 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4293 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4294 IEM_MC_FETCH_EFLAGS(EFlags); \
4295 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4296 \
4297 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
4298 IEM_MC_COMMIT_EFLAGS(EFlags); \
4299 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4300 IEM_MC_END(); \
4301 break; \
4302 } \
4303 \
4304 case IEMMODE_32BIT: \
4305 { \
4306 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4307 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4308 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4309 \
4310 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4311 IEMOP_HLP_DONE_DECODING(); \
4312 \
4313 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4314 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4315 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4316 \
4317 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4318 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4319 IEM_MC_FETCH_EFLAGS(EFlags); \
4320 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4321 \
4322 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
4323 IEM_MC_COMMIT_EFLAGS(EFlags); \
4324 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4325 IEM_MC_END(); \
4326 break; \
4327 } \
4328 \
4329 case IEMMODE_64BIT: \
4330 { \
4331 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4332 \
4333 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4334 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4335 \
4336 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4337 IEMOP_HLP_DONE_DECODING(); \
4338 \
4339 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4340 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4341 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4342 \
4343 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4344 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4345 IEM_MC_FETCH_EFLAGS(EFlags); \
4346 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4347 \
4348 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
4349 IEM_MC_COMMIT_EFLAGS(EFlags); \
4350 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4351 IEM_MC_END(); \
4352 break; \
4353 } \
4354 \
4355 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4356 } \
4357 } \
4358 else \
4359 { \
4360 (void)0
/* LOCK-prefixed memory-target tail closing IEMOP_BODY_BINARY_Ev_Iz_RW's open
   'else'; uses the atomic workers.  This must be a separate macro due to
   parsing restrictions in IEMAllInstPython.py. */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src, u16Imm,         1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src, u32Imm,         1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src, u64Imm,         1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4445
/* Read-only version of IEMOP_BODY_BINARY_Ev_Iz_RW for CMP: memory is mapped
   RO and the LOCK prefix raises \#UD.  Unlike the RW variant this macro is
   self-contained and closes all its braces. */
#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst,               0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src, u16Imm,         1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst,               0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src, u32Imm,         1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst,               0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src, u64Imm,         1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4602
4603
4604/**
4605 * @opmaps grp1_81
4606 * @opcode /0
4607 */
4608FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
4609{
4610 IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
4611 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
4612 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
4613}
4614
4615
4616/**
4617 * @opmaps grp1_81
4618 * @opcode /1
4619 */
4620FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
4621{
4622 IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
4623 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
4624 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
4625}
4626
4627
4628/**
4629 * @opmaps grp1_81
4630 * @opcode /2
4631 */
4632FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
4633{
4634 IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
4635 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
4636 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
4637}
4638
4639
4640/**
4641 * @opmaps grp1_81
4642 * @opcode /3
4643 */
4644FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
4645{
4646 IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
4647 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
4648 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
4649}
4650
4651
4652/**
4653 * @opmaps grp1_81
4654 * @opcode /4
4655 */
4656FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
4657{
4658 IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
4659 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
4660 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
4661}
4662
4663
4664/**
4665 * @opmaps grp1_81
4666 * @opcode /5
4667 */
4668FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
4669{
4670 IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
4671 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
4672 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
4673}
4674
4675
4676/**
4677 * @opmaps grp1_81
4678 * @opcode /6
4679 */
4680FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
4681{
4682 IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
4683 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
4684 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
4685}
4686
4687
4688/**
4689 * @opmaps grp1_81
4690 * @opcode /7
4691 */
4692FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
4693{
4694 IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
4695 IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
4696}
4697
4698
4699/**
4700 * @opcode 0x81
4701 */
4702FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
4703{
4704 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4705 switch (IEM_GET_MODRM_REG_8(bRm))
4706 {
4707 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
4708 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz, bRm);
4709 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
4710 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
4711 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
4712 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
4713 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
4714 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
4715 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4716 }
4717}
4718
4719
4720/**
4721 * @opcode 0x82
4722 * @opmnemonic grp1_82
4723 * @opgroup og_groups
4724 */
4725FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
4726{
4727 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
4728 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
4729}
4730
4731
4732/**
4733 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4734 * iemOp_Grp1_Ev_Ib.
4735 */
4736#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4737 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4738 { \
4739 /* \
4740 * Register target \
4741 */ \
4742 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4743 switch (pVCpu->iem.s.enmEffOpSize) \
4744 { \
4745 case IEMMODE_16BIT: \
4746 IEM_MC_BEGIN(3, 0, 0, 0); \
4747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4748 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4749 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
4750 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4751 \
4752 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4753 IEM_MC_REF_EFLAGS(pEFlags); \
4754 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4755 \
4756 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4757 IEM_MC_END(); \
4758 break; \
4759 \
4760 case IEMMODE_32BIT: \
4761 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4763 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4764 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
4765 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4766 \
4767 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4768 IEM_MC_REF_EFLAGS(pEFlags); \
4769 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4770 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4771 \
4772 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4773 IEM_MC_END(); \
4774 break; \
4775 \
4776 case IEMMODE_64BIT: \
4777 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4779 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4780 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
4781 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4782 \
4783 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4784 IEM_MC_REF_EFLAGS(pEFlags); \
4785 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4786 \
4787 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4788 IEM_MC_END(); \
4789 break; \
4790 \
4791 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4792 } \
4793 } \
4794 else \
4795 { \
4796 /* \
4797 * Memory target. \
4798 */ \
4799 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4800 { \
4801 switch (pVCpu->iem.s.enmEffOpSize) \
4802 { \
4803 case IEMMODE_16BIT: \
4804 IEM_MC_BEGIN(3, 3, 0, 0); \
4805 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4806 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4807 \
4808 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4809 IEMOP_HLP_DONE_DECODING(); \
4810 \
4811 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4812 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4813 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4814 \
4815 IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
4816 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4817 IEM_MC_FETCH_EFLAGS(EFlags); \
4818 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4819 \
4820 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
4821 IEM_MC_COMMIT_EFLAGS(EFlags); \
4822 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4823 IEM_MC_END(); \
4824 break; \
4825 \
4826 case IEMMODE_32BIT: \
4827 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4828 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4829 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4830 \
4831 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4832 IEMOP_HLP_DONE_DECODING(); \
4833 \
4834 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4835 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4836 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4837 \
4838 IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
4839 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4840 IEM_MC_FETCH_EFLAGS(EFlags); \
4841 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4842 \
4843 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
4844 IEM_MC_COMMIT_EFLAGS(EFlags); \
4845 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4846 IEM_MC_END(); \
4847 break; \
4848 \
4849 case IEMMODE_64BIT: \
4850 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4851 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4852 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4853 \
4854 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4855 IEMOP_HLP_DONE_DECODING(); \
4856 \
4857 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4858 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4859 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4860 \
4861 IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
4862 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4863 IEM_MC_FETCH_EFLAGS(EFlags); \
4864 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4865 \
4866 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
4867 IEM_MC_COMMIT_EFLAGS(EFlags); \
4868 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4869 IEM_MC_END(); \
4870 break; \
4871 \
4872 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4873 } \
4874 } \
4875 else \
4876 { \
4877 (void)0
/* LOCK-prefixed memory-target tail closing IEMOP_BODY_BINARY_Ev_Ib_RW's open
   'else'; uses the atomic workers.  Separate macro to work around parsing
   issue in IEMAllInstPython.py. */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src, (int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src, (int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src, (int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4956
/* Read-only variant: for group-1 instructions (CMP) that only inspect the
   destination operand.  The memory operand is mapped read-only
   (IEM_MC_MEM_MAP_Uxx_RO) and a LOCK prefix raises invalid-opcode, since
   nothing is ever written back.  The byte immediate is sign-extended to the
   effective operand size. */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
5103
5104/**
5105 * @opmaps grp1_83
5106 * @opcode /0
5107 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
    /* The _RW body covers the register and plain memory forms; the _LOCKED
       body supplies the LOCK-prefixed memory form (see the macro definitions,
       which sign-extend Ib to the effective operand size). */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
5114
5115
5116/**
5117 * @opmaps grp1_83
5118 * @opcode /1
5119 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
    /* Register/plain-memory forms via _RW, LOCK-prefixed memory form via _LOCKED. */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
5126
5127
5128/**
5129 * @opmaps grp1_83
5130 * @opcode /2
5131 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
    /* Register/plain-memory forms via _RW, LOCK-prefixed memory form via _LOCKED. */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
5138
5139
5140/**
5141 * @opmaps grp1_83
5142 * @opcode /3
5143 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
    /* Register/plain-memory forms via _RW, LOCK-prefixed memory form via _LOCKED. */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
5150
5151
5152/**
5153 * @opmaps grp1_83
5154 * @opcode /4
5155 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
    /* Register/plain-memory forms via _RW, LOCK-prefixed memory form via _LOCKED. */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
5162
5163
5164/**
5165 * @opmaps grp1_83
5166 * @opcode /5
5167 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
    /* Register/plain-memory forms via _RW, LOCK-prefixed memory form via _LOCKED. */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
5174
5175
5176/**
5177 * @opmaps grp1_83
5178 * @opcode /6
5179 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
    /* Register/plain-memory forms via _RW, LOCK-prefixed memory form via _LOCKED. */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
5186
5187
5188/**
5189 * @opmaps grp1_83
5190 * @opcode /7
5191 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
    /* CMP never writes its destination, so the read-only body is used; it
       also rejects the LOCK prefix. */
    IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
5197
5198
5199/**
5200 * @opcode 0x83
5201 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /* Group 1 dispatcher for opcode 0x83: the ModR/M reg field selects which
       arithmetic/logical instruction to decode. */
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
             to the 386 even if absent in the intel reference manuals and some
             3rd party opcode listings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5221
5222
5223/**
5224 * @opcode 0x84
5225 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    /* AF is architecturally undefined after TEST. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* TEST only reads its destination, so the read-only body is used and the
       LOCK prefix is explicitly disallowed. */
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
5233
5234
5235/**
5236 * @opcode 0x85
5237 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    /* AF is architecturally undefined after TEST. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* TEST only reads its destination operand; read-only body. */
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64);
}
5244
5245
5246/**
5247 * @opcode 0x86
5248 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register <-> register: fetch both, then store crosswise. */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(2, 4, 0, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);
        IEM_MC_LOCAL(uint8_t, uTmpReg);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP_U8_RW(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* XCHG with a memory operand is implicitly locked on x86, so the
           locked worker is the default; the unlocked one is only used when
           the VM is configured to disregard locking. */
        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
        else
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Mem, bUnmapInfo);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5299
5300
5301/**
5302 * @opcode 0x87
5303 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register <-> register: fetch both values, store them crosswise. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* The memory operand is mapped read-write and exchanged with the
           register via a locked worker by default (XCHG mem is implicitly
           locked on x86); the unlocked worker is only used when the VM is
           configured to disregard locking. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 4, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_LOCAL(uint16_t, uTmpReg);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U16_RW(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 4, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_LOCAL(uint32_t, uTmpReg);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U32_RW(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 4, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_LOCAL(uint64_t, uTmpReg);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U64_RW(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5444
5445
5446/**
5447 * @opcode 0x88
5448 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    /* MOV r/m8, r8 - store a byte register into the r/m destination. */
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5485
5486
5487/**
5488 * @opcode 0x89
5489 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    /* MOV r/m16/32/64, r16/32/64 - one arm per effective operand size. */
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5583
5584
5585/**
5586 * @opcode 0x8a
5587 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    /* MOV r8, r/m8 - load a byte register from the r/m source. */
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5623
5624
5625/**
5626 * @opcode 0x8b
5627 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    /* MOV r16/32/64, r/m16/32/64 - one arm per effective operand size. */
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5721
5722
5723/**
5724 * opcode 0x63
5725 * @todo Table fixme
5726 */
5727FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
5728{
5729 if (!IEM_IS_64BIT_CODE(pVCpu))
5730 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
5731 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
5732 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
5733 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
5734}
5735
5736
5737/**
5738 * @opcode 0x8c
5739 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    /* MOV r/m, Sreg - store a segment selector into a register or memory. */
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if (iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                /* Zero-extend the 16-bit selector into the wider register. */
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're saving the register to memory. The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5815
5816
5817
5818
5819/**
5820 * @opcode 0x8d
5821 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    /* LEA Gv,M - store the effective address itself; no memory is accessed. */
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Truncate the effective address to the 16-bit operand size. */
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Truncate the effective address to the 32-bit operand size. */
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5868
5869
5870/**
5871 * @opcode 0x8e
5872 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    /* MOV Sreg, r/m16 - load a segment register; delegated to the
       iemCImpl_load_SReg C implementation since this can fault/reload
       descriptor state. */
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    /** @todo r=bird: What does 8086 do here wrt CS? */
    if ( iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     *
     * Note! Using IEMOP_MOV_SW_EV_REG_BODY here to specify different
     *       IEM_CIMPL_F_XXX values depending on the CPU mode and target
     *       register. This is a restriction of the current recompiler
     *       approach.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
#define IEMOP_MOV_SW_EV_REG_BODY(a_fCImplFlags) \
    IEM_MC_BEGIN(2, 0, 0, a_fCImplFlags); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
    IEM_MC_ARG(uint16_t, u16Value, 1); \
    IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
    IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                        RT_BIT_64(kIemNativeGstReg_SegSelFirst + iSegReg) \
                        | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + iSegReg) \
                        | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + iSegReg), \
                        iemCImpl_load_SReg, iSRegArg, u16Value); \
    IEM_MC_END()

        /* Loading SS inhibits interrupts/shadows the next instruction;
           loading SS/DS/ES in 32-bit code can change addressing mode flags,
           hence the different IEM_CIMPL_F_XXX combinations below. */
        if (iSegReg == X86_SREG_SS)
        {
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_REG_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_REG_BODY
    }
    else
    {
        /*
         * We're loading the register from memory. The access is word sized
         * regardless of operand size prefixes.
         */
#define IEMOP_MOV_SW_EV_MEM_BODY(a_fCImplFlags) \
    IEM_MC_BEGIN(2, 1, 0, a_fCImplFlags); \
    IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
    IEM_MC_ARG(uint16_t, u16Value, 1); \
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
    IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                        RT_BIT_64(kIemNativeGstReg_SegSelFirst + iSegReg) \
                        | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + iSegReg) \
                        | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + iSegReg), \
                        iemCImpl_load_SReg, iSRegArg, u16Value); \
    IEM_MC_END()

        if (iSegReg == X86_SREG_SS)
        {
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_MEM_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_MEM_BODY
    }
}
5983
5984
5985/** Opcode 0x8f /0. */
5986FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
5987{
5988 /* This bugger is rather annoying as it requires rSP to be updated before
5989 doing the effective address calculations. Will eventually require a
5990 split between the R/M+SIB decoding and the effective address
5991 calculation - which is something that is required for any attempt at
5992 reusing this code for a recompiler. It may also be good to have if we
5993 need to delay #UD exception caused by invalid lock prefixes.
5994
5995 For now, we'll do a mostly safe interpreter-only implementation here. */
5996 /** @todo What's the deal with the 'reg' field and pop Ev? Ignorning it for
5997 * now until tests show it's checked.. */
5998 IEMOP_MNEMONIC(pop_Ev, "pop Ev");
5999
6000 /* Register access is relatively easy and can share code. */
6001 if (IEM_IS_MODRM_REG_MODE(bRm))
6002 return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
6003
6004 /*
6005 * Memory target.
6006 *
6007 * Intel says that RSP is incremented before it's used in any effective
6008 * address calcuations. This means some serious extra annoyance here since
6009 * we decode and calculate the effective address in one step and like to
6010 * delay committing registers till everything is done.
6011 *
6012 * So, we'll decode and calculate the effective address twice. This will
6013 * require some recoding if turned into a recompiler.
6014 */
6015 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */
6016
6017#if 1 /* This can be compiled, optimize later if needed. */
6018 switch (pVCpu->iem.s.enmEffOpSize)
6019 {
6020 case IEMMODE_16BIT:
6021 IEM_MC_BEGIN(2, 0, 0, 0);
6022 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
6023 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
6024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6025 IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
6026 IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
6027 IEM_MC_END();
6028 break;
6029
6030 case IEMMODE_32BIT:
6031 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
6032 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
6033 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
6034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6035 IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
6036 IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
6037 IEM_MC_END();
6038 break;
6039
6040 case IEMMODE_64BIT:
6041 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
6042 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
6043 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
6044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6045 IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
6046 IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
6047 IEM_MC_END();
6048 break;
6049
6050 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6051 }
6052
6053#else
6054# ifndef TST_IEM_CHECK_MC
6055 /* Calc effective address with modified ESP. */
6056/** @todo testcase */
6057 RTGCPTR GCPtrEff;
6058 VBOXSTRICTRC rcStrict;
6059 switch (pVCpu->iem.s.enmEffOpSize)
6060 {
6061 case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
6062 case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
6063 case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
6064 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6065 }
6066 if (rcStrict != VINF_SUCCESS)
6067 return rcStrict;
6068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6069
6070 /* Perform the operation - this should be CImpl. */
6071 RTUINT64U TmpRsp;
6072 TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
6073 switch (pVCpu->iem.s.enmEffOpSize)
6074 {
6075 case IEMMODE_16BIT:
6076 {
6077 uint16_t u16Value;
6078 rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
6079 if (rcStrict == VINF_SUCCESS)
6080 rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
6081 break;
6082 }
6083
6084 case IEMMODE_32BIT:
6085 {
6086 uint32_t u32Value;
6087 rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
6088 if (rcStrict == VINF_SUCCESS)
6089 rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
6090 break;
6091 }
6092
6093 case IEMMODE_64BIT:
6094 {
6095 uint64_t u64Value;
6096 rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
6097 if (rcStrict == VINF_SUCCESS)
6098 rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
6099 break;
6100 }
6101
6102 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6103 }
6104 if (rcStrict == VINF_SUCCESS)
6105 {
6106 pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
6107 return iemRegUpdateRipAndFinishClearingRF(pVCpu);
6108 }
6109 return rcStrict;
6110
6111# else
6112 return VERR_IEM_IPE_2;
6113# endif
6114#endif
6115}
6116
6117
6118/**
6119 * @opcode 0x8f
6120 */
6121FNIEMOP_DEF(iemOp_Grp1A__xop)
6122{
6123 /*
6124 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
6125 * three byte VEX prefix, except that the mmmmm field cannot have the values
6126 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
6127 */
6128 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6129 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
6130 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
6131
6132 IEMOP_MNEMONIC(xop, "xop");
6133 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
6134 {
6135 /** @todo Test when exctly the XOP conformance checks kick in during
6136 * instruction decoding and fetching (using \#PF). */
6137 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
6138 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6139 if ( ( pVCpu->iem.s.fPrefixes
6140 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
6141 == 0)
6142 {
6143 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
6144 if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
6145 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
6146 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
6147 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
6148 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
6149 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
6150 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
6151 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
6152
6153 /** @todo XOP: Just use new tables and decoders. */
6154 switch (bRm & 0x1f)
6155 {
6156 case 8: /* xop opcode map 8. */
6157 IEMOP_BITCH_ABOUT_STUB();
6158 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6159
6160 case 9: /* xop opcode map 9. */
6161 IEMOP_BITCH_ABOUT_STUB();
6162 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6163
6164 case 10: /* xop opcode map 10. */
6165 IEMOP_BITCH_ABOUT_STUB();
6166 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6167
6168 default:
6169 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
6170 IEMOP_RAISE_INVALID_OPCODE_RET();
6171 }
6172 }
6173 else
6174 Log(("XOP: Invalid prefix mix!\n"));
6175 }
6176 else
6177 Log(("XOP: XOP support disabled!\n"));
6178 IEMOP_RAISE_INVALID_OPCODE_RET();
6179}
6180
6181
6182/**
6183 * Common 'xchg reg,rAX' helper.
6184 */
6185FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
6186{
6187 iReg |= pVCpu->iem.s.uRexB;
6188 switch (pVCpu->iem.s.enmEffOpSize)
6189 {
6190 case IEMMODE_16BIT:
6191 IEM_MC_BEGIN(0, 2, 0, 0);
6192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6193 IEM_MC_LOCAL(uint16_t, u16Tmp1);
6194 IEM_MC_LOCAL(uint16_t, u16Tmp2);
6195 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
6196 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
6197 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
6198 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
6199 IEM_MC_ADVANCE_RIP_AND_FINISH();
6200 IEM_MC_END();
6201 break;
6202
6203 case IEMMODE_32BIT:
6204 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
6205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6206 IEM_MC_LOCAL(uint32_t, u32Tmp1);
6207 IEM_MC_LOCAL(uint32_t, u32Tmp2);
6208 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
6209 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
6210 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
6211 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
6212 IEM_MC_ADVANCE_RIP_AND_FINISH();
6213 IEM_MC_END();
6214 break;
6215
6216 case IEMMODE_64BIT:
6217 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6219 IEM_MC_LOCAL(uint64_t, u64Tmp1);
6220 IEM_MC_LOCAL(uint64_t, u64Tmp2);
6221 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
6222 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
6223 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
6224 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
6225 IEM_MC_ADVANCE_RIP_AND_FINISH();
6226 IEM_MC_END();
6227 break;
6228
6229 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6230 }
6231}
6232
6233
6234/**
6235 * @opcode 0x90
6236 */
6237FNIEMOP_DEF(iemOp_nop)
6238{
6239 /* R8/R8D and RAX/EAX can be exchanged. */
6240 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
6241 {
6242 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
6243 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
6244 }
6245
6246 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
6247 {
6248 IEMOP_MNEMONIC(pause, "pause");
6249 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
6250 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
6251 if (!IEM_IS_IN_GUEST(pVCpu))
6252 { /* probable */ }
6253#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6254 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
6255 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmx_pause);
6256#endif
6257#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
6258 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
6259 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_svm_pause);
6260#endif
6261 }
6262 else
6263 IEMOP_MNEMONIC(nop, "nop");
6264 /** @todo testcase: lock nop; lock pause */
6265 IEM_MC_BEGIN(0, 0, 0, 0);
6266 IEMOP_HLP_DONE_DECODING();
6267 IEM_MC_ADVANCE_RIP_AND_FINISH();
6268 IEM_MC_END();
6269}
6270
6271
6272/**
6273 * @opcode 0x91
6274 */
6275FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
6276{
6277 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
6278 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
6279}
6280
6281
6282/**
6283 * @opcode 0x92
6284 */
6285FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
6286{
6287 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
6288 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
6289}
6290
6291
6292/**
6293 * @opcode 0x93
6294 */
6295FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
6296{
6297 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
6298 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
6299}
6300
6301
6302/**
6303 * @opcode 0x94
6304 */
6305FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6306{
6307 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6308 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6309}
6310
6311
6312/**
6313 * @opcode 0x95
6314 */
6315FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
6316{
6317 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
6318 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
6319}
6320
6321
6322/**
6323 * @opcode 0x96
6324 */
6325FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
6326{
6327 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
6328 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
6329}
6330
6331
6332/**
6333 * @opcode 0x97
6334 */
6335FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
6336{
6337 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
6338 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
6339}
6340
6341
6342/**
6343 * @opcode 0x98
6344 */
6345FNIEMOP_DEF(iemOp_cbw)
6346{
6347 switch (pVCpu->iem.s.enmEffOpSize)
6348 {
6349 case IEMMODE_16BIT:
6350 IEMOP_MNEMONIC(cbw, "cbw");
6351 IEM_MC_BEGIN(0, 1, 0, 0);
6352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6353 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
6354 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
6355 } IEM_MC_ELSE() {
6356 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
6357 } IEM_MC_ENDIF();
6358 IEM_MC_ADVANCE_RIP_AND_FINISH();
6359 IEM_MC_END();
6360 break;
6361
6362 case IEMMODE_32BIT:
6363 IEMOP_MNEMONIC(cwde, "cwde");
6364 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6366 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6367 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
6368 } IEM_MC_ELSE() {
6369 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
6370 } IEM_MC_ENDIF();
6371 IEM_MC_ADVANCE_RIP_AND_FINISH();
6372 IEM_MC_END();
6373 break;
6374
6375 case IEMMODE_64BIT:
6376 IEMOP_MNEMONIC(cdqe, "cdqe");
6377 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6378 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6379 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6380 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
6381 } IEM_MC_ELSE() {
6382 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
6383 } IEM_MC_ENDIF();
6384 IEM_MC_ADVANCE_RIP_AND_FINISH();
6385 IEM_MC_END();
6386 break;
6387
6388 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6389 }
6390}
6391
6392
6393/**
6394 * @opcode 0x99
6395 */
6396FNIEMOP_DEF(iemOp_cwd)
6397{
6398 switch (pVCpu->iem.s.enmEffOpSize)
6399 {
6400 case IEMMODE_16BIT:
6401 IEMOP_MNEMONIC(cwd, "cwd");
6402 IEM_MC_BEGIN(0, 1, 0, 0);
6403 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6404 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6405 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
6406 } IEM_MC_ELSE() {
6407 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
6408 } IEM_MC_ENDIF();
6409 IEM_MC_ADVANCE_RIP_AND_FINISH();
6410 IEM_MC_END();
6411 break;
6412
6413 case IEMMODE_32BIT:
6414 IEMOP_MNEMONIC(cdq, "cdq");
6415 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6417 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6418 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
6419 } IEM_MC_ELSE() {
6420 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
6421 } IEM_MC_ENDIF();
6422 IEM_MC_ADVANCE_RIP_AND_FINISH();
6423 IEM_MC_END();
6424 break;
6425
6426 case IEMMODE_64BIT:
6427 IEMOP_MNEMONIC(cqo, "cqo");
6428 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6430 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
6431 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
6432 } IEM_MC_ELSE() {
6433 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
6434 } IEM_MC_ENDIF();
6435 IEM_MC_ADVANCE_RIP_AND_FINISH();
6436 IEM_MC_END();
6437 break;
6438
6439 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6440 }
6441}
6442
6443
6444/**
6445 * @opcode 0x9a
6446 */
6447FNIEMOP_DEF(iemOp_call_Ap)
6448{
6449 IEMOP_MNEMONIC(call_Ap, "call Ap");
6450 IEMOP_HLP_NO_64BIT();
6451
6452 /* Decode the far pointer address and pass it on to the far call C implementation. */
6453 uint32_t off32Seg;
6454 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
6455 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
6456 else
6457 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
6458 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
6459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6460 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
6461 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
6462 iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
6463 /** @todo make task-switches, ring-switches, ++ return non-zero status */
6464}
6465
6466
/** Opcode 0x9b. (aka fwait)
 *
 * Checks for pending x87 exceptions / device-not-available conditions and
 * otherwise does nothing but advance RIP. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6478
6479
6480/**
6481 * @opcode 0x9c
6482 */
6483FNIEMOP_DEF(iemOp_pushf_Fv)
6484{
6485 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
6486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6487 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6488 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
6489 iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
6490}
6491
6492
6493/**
6494 * @opcode 0x9d
6495 */
6496FNIEMOP_DEF(iemOp_popf_Fv)
6497{
6498 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
6499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6500 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6501 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
6502 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
6503 iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
6504}
6505
6506
6507/**
6508 * @opcode 0x9e
6509 */
6510FNIEMOP_DEF(iemOp_sahf)
6511{
6512 IEMOP_MNEMONIC(sahf, "sahf");
6513 if ( IEM_IS_64BIT_CODE(pVCpu)
6514 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6515 IEMOP_RAISE_INVALID_OPCODE_RET();
6516 IEM_MC_BEGIN(0, 2, 0, 0);
6517 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6518 IEM_MC_LOCAL(uint32_t, u32Flags);
6519 IEM_MC_LOCAL(uint32_t, EFlags);
6520 IEM_MC_FETCH_EFLAGS(EFlags);
6521 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
6522 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6523 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
6524 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
6525 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
6526 IEM_MC_COMMIT_EFLAGS(EFlags);
6527 IEM_MC_ADVANCE_RIP_AND_FINISH();
6528 IEM_MC_END();
6529}
6530
6531
6532/**
6533 * @opcode 0x9f
6534 */
6535FNIEMOP_DEF(iemOp_lahf)
6536{
6537 IEMOP_MNEMONIC(lahf, "lahf");
6538 if ( IEM_IS_64BIT_CODE(pVCpu)
6539 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6540 IEMOP_RAISE_INVALID_OPCODE_RET();
6541 IEM_MC_BEGIN(0, 1, 0, 0);
6542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6543 IEM_MC_LOCAL(uint8_t, u8Flags);
6544 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
6545 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
6546 IEM_MC_ADVANCE_RIP_AND_FINISH();
6547 IEM_MC_END();
6548}
6549
6550
6551/**
6552 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
6553 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
6554 * Will return/throw on failures.
6555 * @param a_GCPtrMemOff The variable to store the offset in.
6556 */
6557#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
6558 do \
6559 { \
6560 switch (pVCpu->iem.s.enmEffAddrMode) \
6561 { \
6562 case IEMMODE_16BIT: \
6563 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
6564 break; \
6565 case IEMMODE_32BIT: \
6566 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
6567 break; \
6568 case IEMMODE_64BIT: \
6569 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
6570 break; \
6571 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
6572 } \
6573 } while (0)
6574
6575/**
6576 * @opcode 0xa0
6577 */
6578FNIEMOP_DEF(iemOp_mov_AL_Ob)
6579{
6580 /*
6581 * Get the offset.
6582 */
6583 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
6584 RTGCPTR GCPtrMemOffDecode;
6585 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
6586
6587 /*
6588 * Fetch AL.
6589 */
6590 IEM_MC_BEGIN(0, 2, 0, 0);
6591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6592 IEM_MC_LOCAL(uint8_t, u8Tmp);
6593 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6594 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6595 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
6596 IEM_MC_ADVANCE_RIP_AND_FINISH();
6597 IEM_MC_END();
6598}
6599
6600
6601/**
6602 * @opcode 0xa1
6603 */
6604FNIEMOP_DEF(iemOp_mov_rAX_Ov)
6605{
6606 /*
6607 * Get the offset.
6608 */
6609 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
6610 RTGCPTR GCPtrMemOffDecode;
6611 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
6612
6613 /*
6614 * Fetch rAX.
6615 */
6616 switch (pVCpu->iem.s.enmEffOpSize)
6617 {
6618 case IEMMODE_16BIT:
6619 IEM_MC_BEGIN(0, 2, 0, 0);
6620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6621 IEM_MC_LOCAL(uint16_t, u16Tmp);
6622 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6623 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6624 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
6625 IEM_MC_ADVANCE_RIP_AND_FINISH();
6626 IEM_MC_END();
6627 break;
6628
6629 case IEMMODE_32BIT:
6630 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
6631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6632 IEM_MC_LOCAL(uint32_t, u32Tmp);
6633 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6634 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6635 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
6636 IEM_MC_ADVANCE_RIP_AND_FINISH();
6637 IEM_MC_END();
6638 break;
6639
6640 case IEMMODE_64BIT:
6641 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6643 IEM_MC_LOCAL(uint64_t, u64Tmp);
6644 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6645 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6646 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
6647 IEM_MC_ADVANCE_RIP_AND_FINISH();
6648 IEM_MC_END();
6649 break;
6650
6651 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6652 }
6653}
6654
6655
6656/**
6657 * @opcode 0xa2
6658 */
6659FNIEMOP_DEF(iemOp_mov_Ob_AL)
6660{
6661 /*
6662 * Get the offset.
6663 */
6664 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
6665 RTGCPTR GCPtrMemOffDecode;
6666 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
6667
6668 /*
6669 * Store AL.
6670 */
6671 IEM_MC_BEGIN(0, 2, 0, 0);
6672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6673 IEM_MC_LOCAL(uint8_t, u8Tmp);
6674 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
6675 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6676 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
6677 IEM_MC_ADVANCE_RIP_AND_FINISH();
6678 IEM_MC_END();
6679}
6680
6681
6682/**
6683 * @opcode 0xa3
6684 */
6685FNIEMOP_DEF(iemOp_mov_Ov_rAX)
6686{
6687 /*
6688 * Get the offset.
6689 */
6690 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
6691 RTGCPTR GCPtrMemOffDecode;
6692 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
6693
6694 /*
6695 * Store rAX.
6696 */
6697 switch (pVCpu->iem.s.enmEffOpSize)
6698 {
6699 case IEMMODE_16BIT:
6700 IEM_MC_BEGIN(0, 2, 0, 0);
6701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6702 IEM_MC_LOCAL(uint16_t, u16Tmp);
6703 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
6704 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6705 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
6706 IEM_MC_ADVANCE_RIP_AND_FINISH();
6707 IEM_MC_END();
6708 break;
6709
6710 case IEMMODE_32BIT:
6711 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
6712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6713 IEM_MC_LOCAL(uint32_t, u32Tmp);
6714 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
6715 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6716 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
6717 IEM_MC_ADVANCE_RIP_AND_FINISH();
6718 IEM_MC_END();
6719 break;
6720
6721 case IEMMODE_64BIT:
6722 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6724 IEM_MC_LOCAL(uint64_t, u64Tmp);
6725 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
6726 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6727 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
6728 IEM_MC_ADVANCE_RIP_AND_FINISH();
6729 IEM_MC_END();
6730 break;
6731
6732 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6733 }
6734}
6735
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one non-rep MOVS step: load ValBits from [iEffSeg:rSI], store to
 * [ES:rDI], then advance (or, with EFLAGS.DF set, retreat) both rSI and rDI
 * by the operand size.  AddrBits selects the address-size register width. */
#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR,           uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6755
6756/**
6757 * @opcode 0xa4
6758 */
6759FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
6760{
6761 /*
6762 * Use the C implementation if a repeat prefix is encountered.
6763 */
6764 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6765 {
6766 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
6767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6768 switch (pVCpu->iem.s.enmEffAddrMode)
6769 {
6770 case IEMMODE_16BIT:
6771 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6772 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6773 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6774 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6775 iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
6776 case IEMMODE_32BIT:
6777 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6778 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6779 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6780 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6781 iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
6782 case IEMMODE_64BIT:
6783 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6784 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6785 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6786 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6787 iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
6788 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6789 }
6790 }
6791
6792 /*
6793 * Sharing case implementation with movs[wdq] below.
6794 */
6795 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
6796 switch (pVCpu->iem.s.enmEffAddrMode)
6797 {
6798 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
6799 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
6800 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
6801 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6802 }
6803}
6804
6805
6806/**
6807 * @opcode 0xa5
6808 */
6809FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
6810{
6811
6812 /*
6813 * Use the C implementation if a repeat prefix is encountered.
6814 */
6815 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6816 {
6817 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
6818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6819 switch (pVCpu->iem.s.enmEffOpSize)
6820 {
6821 case IEMMODE_16BIT:
6822 switch (pVCpu->iem.s.enmEffAddrMode)
6823 {
6824 case IEMMODE_16BIT:
6825 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6826 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6827 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6828 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6829 iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
6830 case IEMMODE_32BIT:
6831 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6832 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6833 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6834 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6835 iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
6836 case IEMMODE_64BIT:
6837 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6838 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6839 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6840 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6841 iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
6842 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6843 }
6844 break;
6845 case IEMMODE_32BIT:
6846 switch (pVCpu->iem.s.enmEffAddrMode)
6847 {
6848 case IEMMODE_16BIT:
6849 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6850 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6851 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6852 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6853 iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
6854 case IEMMODE_32BIT:
6855 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6856 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6857 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6858 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6859 iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
6860 case IEMMODE_64BIT:
6861 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6862 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6863 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6864 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6865 iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
6866 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6867 }
6868 case IEMMODE_64BIT:
6869 switch (pVCpu->iem.s.enmEffAddrMode)
6870 {
6871 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
6872 case IEMMODE_32BIT:
6873 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6874 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6875 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6876 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6877 iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
6878 case IEMMODE_64BIT:
6879 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6880 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6881 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6882 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6883 iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
6884 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6885 }
6886 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6887 }
6888 }
6889
6890 /*
6891 * Annoying double switch here.
6892 * Using ugly macro for implementing the cases, sharing it with movsb.
6893 */
6894 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
6895 switch (pVCpu->iem.s.enmEffOpSize)
6896 {
6897 case IEMMODE_16BIT:
6898 switch (pVCpu->iem.s.enmEffAddrMode)
6899 {
6900 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
6901 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
6902 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
6903 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6904 }
6905 break;
6906
6907 case IEMMODE_32BIT:
6908 switch (pVCpu->iem.s.enmEffAddrMode)
6909 {
6910 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
6911 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
6912 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
6913 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6914 }
6915 break;
6916
6917 case IEMMODE_64BIT:
6918 switch (pVCpu->iem.s.enmEffAddrMode)
6919 {
6920 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6921 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
6922 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
6923 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6924 }
6925 break;
6926 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6927 }
6928}
6929
6930#undef IEM_MOVS_CASE
6931
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one non-rep CMPS step: load ValBits from [iEffSeg:rSI] and [ES:rDI],
 * run the cmp worker to update EFLAGS (operands are not written back), then
 * advance (or, with EFLAGS.DF set, retreat) both rSI and rDI by the operand
 * size.  AddrBits selects the address-size register width. */
#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 3, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
    IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
    IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_LOCAL(RTGCPTR,           uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
    IEM_MC_REF_LOCAL(puValue1, uValue1); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6959
/**
 * @opcode 0xa6
 *
 * CMPSB - compares the byte at [iEffSeg:xSI] against the byte at [ES:xDI],
 * updating the status flags and stepping xSI/xDI by EFLAGS.DF.
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * The deferred iemCImpl workers may read and modify xSI, xDI and xCX,
     * hence the guest register mask passed to the deferral macro.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            /* Note: each IEM_MC_DEFER_TO_CIMPL_1_RET returns, so no breaks. */
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7036
7037
7038/**
7039 * @opcode 0xa7
7040 */
7041FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
7042{
7043 /*
7044 * Use the C implementation if a repeat prefix is encountered.
7045 */
7046 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7047 {
7048 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
7049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7050 switch (pVCpu->iem.s.enmEffOpSize)
7051 {
7052 case IEMMODE_16BIT:
7053 switch (pVCpu->iem.s.enmEffAddrMode)
7054 {
7055 case IEMMODE_16BIT:
7056 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7057 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7058 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7059 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7060 iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7061 case IEMMODE_32BIT:
7062 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7063 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7064 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7065 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7066 iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7067 case IEMMODE_64BIT:
7068 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7069 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7070 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7071 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7072 iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7073 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7074 }
7075 break;
7076 case IEMMODE_32BIT:
7077 switch (pVCpu->iem.s.enmEffAddrMode)
7078 {
7079 case IEMMODE_16BIT:
7080 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7081 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7082 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7083 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7084 iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7085 case IEMMODE_32BIT:
7086 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7087 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7088 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7089 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7090 iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7091 case IEMMODE_64BIT:
7092 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7093 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7094 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7095 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7096 iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7097 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7098 }
7099 case IEMMODE_64BIT:
7100 switch (pVCpu->iem.s.enmEffAddrMode)
7101 {
7102 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
7103 case IEMMODE_32BIT:
7104 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7105 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7106 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7107 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7108 iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7109 case IEMMODE_64BIT:
7110 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7111 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7112 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7113 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7114 iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7115 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7116 }
7117 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7118 }
7119 }
7120
7121 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7122 {
7123 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
7124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7125 switch (pVCpu->iem.s.enmEffOpSize)
7126 {
7127 case IEMMODE_16BIT:
7128 switch (pVCpu->iem.s.enmEffAddrMode)
7129 {
7130 case IEMMODE_16BIT:
7131 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7132 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7133 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7134 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7135 iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7136 case IEMMODE_32BIT:
7137 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7138 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7139 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7140 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7141 iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7142 case IEMMODE_64BIT:
7143 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7144 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7145 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7146 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7147 iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7148 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7149 }
7150 break;
7151 case IEMMODE_32BIT:
7152 switch (pVCpu->iem.s.enmEffAddrMode)
7153 {
7154 case IEMMODE_16BIT:
7155 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7156 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7157 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7158 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7159 iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7160 case IEMMODE_32BIT:
7161 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7162 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7163 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7164 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7165 iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7166 case IEMMODE_64BIT:
7167 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7168 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7169 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7170 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7171 iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7172 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7173 }
7174 case IEMMODE_64BIT:
7175 switch (pVCpu->iem.s.enmEffAddrMode)
7176 {
7177 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
7178 case IEMMODE_32BIT:
7179 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7180 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7181 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7182 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7183 iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7184 case IEMMODE_64BIT:
7185 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7186 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7187 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7188 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7189 iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7190 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7191 }
7192 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7193 }
7194 }
7195
7196 /*
7197 * Annoying double switch here.
7198 * Using ugly macro for implementing the cases, sharing it with cmpsb.
7199 */
7200 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
7201 switch (pVCpu->iem.s.enmEffOpSize)
7202 {
7203 case IEMMODE_16BIT:
7204 switch (pVCpu->iem.s.enmEffAddrMode)
7205 {
7206 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7207 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7208 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
7209 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7210 }
7211 break;
7212
7213 case IEMMODE_32BIT:
7214 switch (pVCpu->iem.s.enmEffAddrMode)
7215 {
7216 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7217 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7218 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
7219 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7220 }
7221 break;
7222
7223 case IEMMODE_64BIT:
7224 switch (pVCpu->iem.s.enmEffAddrMode)
7225 {
7226 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7227 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break;
7228 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
7229 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7230 }
7231 break;
7232 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7233 }
7234}
7235
7236#undef IEM_CMPS_CASE
7237
/**
 * @opcode 0xa8
 *
 * TEST AL,Ib - ANDs AL with the immediate byte, setting the status flags
 * without storing the result; AF is left undefined.
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF undefined after TEST */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
}
7247
7248
/**
 * @opcode 0xa9
 *
 * TEST rAX,Iz - ANDs AX/EAX/RAX with the immediate, setting the status flags
 * without storing the result; AF is left undefined.  The shared body macro
 * picks the 16/32/64-bit worker by effective operand size.
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF undefined after TEST */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
}
7258
7259
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the MC block for one non-REP STOS variant: stores AL/AX/EAX/RAX to
 * [ES:xDI], then steps xDI by the operand size — backwards when EFLAGS.DF is
 * set, forwards otherwise.
 *
 * @param   ValBits     Operand size in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address size in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX mode restriction flags for IEM_MC_BEGIN.
 */
#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
7276
/**
 * @opcode 0xaa
 *
 * STOSB - stores AL to [ES:xDI] and steps xDI by EFLAGS.DF.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     * REP and REPNE are treated alike here (REPNE has no distinct meaning
     * for STOS).  The deferred workers may modify xDI and xCX.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            /* Note: each IEM_MC_DEFER_TO_CIMPL_0_RET returns, so no breaks. */
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m16);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m32);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7322
7323
7324/**
7325 * @opcode 0xab
7326 */
7327FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
7328{
7329 /*
7330 * Use the C implementation if a repeat prefix is encountered.
7331 */
7332 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7333 {
7334 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
7335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7336 switch (pVCpu->iem.s.enmEffOpSize)
7337 {
7338 case IEMMODE_16BIT:
7339 switch (pVCpu->iem.s.enmEffAddrMode)
7340 {
7341 case IEMMODE_16BIT:
7342 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7343 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7344 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7345 iemCImpl_stos_ax_m16);
7346 case IEMMODE_32BIT:
7347 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7348 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7349 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7350 iemCImpl_stos_ax_m32);
7351 case IEMMODE_64BIT:
7352 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7353 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7354 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7355 iemCImpl_stos_ax_m64);
7356 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7357 }
7358 break;
7359 case IEMMODE_32BIT:
7360 switch (pVCpu->iem.s.enmEffAddrMode)
7361 {
7362 case IEMMODE_16BIT:
7363 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7364 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7365 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7366 iemCImpl_stos_eax_m16);
7367 case IEMMODE_32BIT:
7368 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7369 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7370 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7371 iemCImpl_stos_eax_m32);
7372 case IEMMODE_64BIT:
7373 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7374 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7375 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7376 iemCImpl_stos_eax_m64);
7377 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7378 }
7379 case IEMMODE_64BIT:
7380 switch (pVCpu->iem.s.enmEffAddrMode)
7381 {
7382 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7383 case IEMMODE_32BIT:
7384 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7385 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7386 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7387 iemCImpl_stos_rax_m32);
7388 case IEMMODE_64BIT:
7389 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7390 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7391 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7392 iemCImpl_stos_rax_m64);
7393 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7394 }
7395 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7396 }
7397 }
7398
7399 /*
7400 * Annoying double switch here.
7401 * Using ugly macro for implementing the cases, sharing it with stosb.
7402 */
7403 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7404 switch (pVCpu->iem.s.enmEffOpSize)
7405 {
7406 case IEMMODE_16BIT:
7407 switch (pVCpu->iem.s.enmEffAddrMode)
7408 {
7409 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7410 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7411 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
7412 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7413 }
7414 break;
7415
7416 case IEMMODE_32BIT:
7417 switch (pVCpu->iem.s.enmEffAddrMode)
7418 {
7419 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7420 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7421 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
7422 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7423 }
7424 break;
7425
7426 case IEMMODE_64BIT:
7427 switch (pVCpu->iem.s.enmEffAddrMode)
7428 {
7429 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7430 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
7431 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
7432 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7433 }
7434 break;
7435 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7436 }
7437}
7438
7439#undef IEM_STOS_CASE
7440
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the MC block for one non-REP LODS variant: loads the operand from
 * [iEffSeg:xSI] into AL/AX/EAX/RAX, then steps xSI by the operand size —
 * backwards when EFLAGS.DF is set, forwards otherwise.
 *
 * @param   ValBits     Operand size in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address size in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX mode restriction flags for IEM_MC_BEGIN.
 */
#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
7457
/**
 * @opcode 0xac
 *
 * LODSB - loads the byte at [iEffSeg:xSI] into AL and steps xSI by EFLAGS.DF.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     * REP and REPNE are treated alike here (REPNE has no distinct meaning
     * for LODS).  The deferred workers may modify xAX, xSI and xCX.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            /* Note: each IEM_MC_DEFER_TO_CIMPL_1_RET returns, so no breaks. */
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7506
7507
/**
 * @opcode 0xad
 *
 * LODSW/LODSD/LODSQ - loads the operand at [iEffSeg:xSI] into AX/EAX/RAX and
 * steps xSI by EFLAGS.DF.
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     * REP and REPNE are treated alike here (REPNE has no distinct meaning
     * for LODS).  The deferred workers may modify xAX, xSI and xCX.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    /* Note: each IEM_MC_DEFER_TO_CIMPL_1_RET returns, so no breaks. */
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7630
7631#undef IEM_LODS_CASE
7632
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the MC block for one non-REP SCAS variant: compares AL/AX/EAX/RAX
 * against the operand at [ES:xDI] via the shared iemAImpl_cmp_u* worker
 * (EFLAGS only, accumulator unchanged), then steps xDI by the operand size —
 * backwards when EFLAGS.DF is set, forwards otherwise.
 *
 * @param   ValBits     Operand size in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address size in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX mode restriction flags for IEM_MC_BEGIN.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(3, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END();
7655
7656/**
7657 * @opcode 0xae
7658 */
7659FNIEMOP_DEF(iemOp_scasb_AL_Xb)
7660{
7661 /*
7662 * Use the C implementation if a repeat prefix is encountered.
7663 */
7664 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7665 {
7666 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
7667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7668 switch (pVCpu->iem.s.enmEffAddrMode)
7669 {
7670 case IEMMODE_16BIT:
7671 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7672 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7673 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7674 iemCImpl_repe_scas_al_m16);
7675 case IEMMODE_32BIT:
7676 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7677 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7678 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7679 iemCImpl_repe_scas_al_m32);
7680 case IEMMODE_64BIT:
7681 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7682 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7683 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7684 iemCImpl_repe_scas_al_m64);
7685 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7686 }
7687 }
7688 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7689 {
7690 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
7691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7692 switch (pVCpu->iem.s.enmEffAddrMode)
7693 {
7694 case IEMMODE_16BIT:
7695 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7696 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7697 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7698 iemCImpl_repne_scas_al_m16);
7699 case IEMMODE_32BIT:
7700 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7701 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7702 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7703 iemCImpl_repne_scas_al_m32);
7704 case IEMMODE_64BIT:
7705 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7706 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7707 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7708 iemCImpl_repne_scas_al_m64);
7709 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7710 }
7711 }
7712
7713 /*
7714 * Sharing case implementation with stos[wdq] below.
7715 */
7716 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
7717 switch (pVCpu->iem.s.enmEffAddrMode)
7718 {
7719 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7720 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7721 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
7722 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7723 }
7724}
7725
7726
7727/**
7728 * @opcode 0xaf
7729 */
7730FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
7731{
7732 /*
7733 * Use the C implementation if a repeat prefix is encountered.
7734 */
7735 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7736 {
7737 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
7738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7739 switch (pVCpu->iem.s.enmEffOpSize)
7740 {
7741 case IEMMODE_16BIT:
7742 switch (pVCpu->iem.s.enmEffAddrMode)
7743 {
7744 case IEMMODE_16BIT:
7745 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7746 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7747 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7748 iemCImpl_repe_scas_ax_m16);
7749 case IEMMODE_32BIT:
7750 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7751 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7752 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7753 iemCImpl_repe_scas_ax_m32);
7754 case IEMMODE_64BIT:
7755 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7756 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7757 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7758 iemCImpl_repe_scas_ax_m64);
7759 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7760 }
7761 break;
7762 case IEMMODE_32BIT:
7763 switch (pVCpu->iem.s.enmEffAddrMode)
7764 {
7765 case IEMMODE_16BIT:
7766 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7767 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7768 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7769 iemCImpl_repe_scas_eax_m16);
7770 case IEMMODE_32BIT:
7771 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7772 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7773 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7774 iemCImpl_repe_scas_eax_m32);
7775 case IEMMODE_64BIT:
7776 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7777 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7778 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7779 iemCImpl_repe_scas_eax_m64);
7780 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7781 }
7782 case IEMMODE_64BIT:
7783 switch (pVCpu->iem.s.enmEffAddrMode)
7784 {
7785 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
7786 case IEMMODE_32BIT:
7787 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7788 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7789 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7790 iemCImpl_repe_scas_rax_m32);
7791 case IEMMODE_64BIT:
7792 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7793 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7794 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7795 iemCImpl_repe_scas_rax_m64);
7796 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7797 }
7798 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7799 }
7800 }
7801 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7802 {
7803 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
7804 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7805 switch (pVCpu->iem.s.enmEffOpSize)
7806 {
7807 case IEMMODE_16BIT:
7808 switch (pVCpu->iem.s.enmEffAddrMode)
7809 {
7810 case IEMMODE_16BIT:
7811 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7812 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7813 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7814 iemCImpl_repne_scas_ax_m16);
7815 case IEMMODE_32BIT:
7816 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7817 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7818 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7819 iemCImpl_repne_scas_ax_m32);
7820 case IEMMODE_64BIT:
7821 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7822 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7823 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7824 iemCImpl_repne_scas_ax_m64);
7825 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7826 }
7827 break;
7828 case IEMMODE_32BIT:
7829 switch (pVCpu->iem.s.enmEffAddrMode)
7830 {
7831 case IEMMODE_16BIT:
7832 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7833 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7834 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7835 iemCImpl_repne_scas_eax_m16);
7836 case IEMMODE_32BIT:
7837 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7838 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7839 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7840 iemCImpl_repne_scas_eax_m32);
7841 case IEMMODE_64BIT:
7842 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7843 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7844 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7845 iemCImpl_repne_scas_eax_m64);
7846 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7847 }
7848 case IEMMODE_64BIT:
7849 switch (pVCpu->iem.s.enmEffAddrMode)
7850 {
7851 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
7852 case IEMMODE_32BIT:
7853 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7854 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7855 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7856 iemCImpl_repne_scas_rax_m32);
7857 case IEMMODE_64BIT:
7858 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7859 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7860 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7861 iemCImpl_repne_scas_rax_m64);
7862 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7863 }
7864 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7865 }
7866 }
7867
7868 /*
7869 * Annoying double switch here.
7870 * Using ugly macro for implementing the cases, sharing it with scasb.
7871 */
7872 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
7873 switch (pVCpu->iem.s.enmEffOpSize)
7874 {
7875 case IEMMODE_16BIT:
7876 switch (pVCpu->iem.s.enmEffAddrMode)
7877 {
7878 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7879 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7880 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
7881 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7882 }
7883 break;
7884
7885 case IEMMODE_32BIT:
7886 switch (pVCpu->iem.s.enmEffAddrMode)
7887 {
7888 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7889 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7890 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
7891 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7892 }
7893 break;
7894
7895 case IEMMODE_64BIT:
7896 switch (pVCpu->iem.s.enmEffAddrMode)
7897 {
7898 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7899 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
7900 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
7901 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7902 }
7903 break;
7904 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7905 }
7906}
7907
7908#undef IEM_SCAS_CASE
7909
/**
 * Common 'mov r8, imm8' helper.
 *
 * Stores the next opcode byte (imm8) into the given fixed 8-bit register.
 *
 * @param   iFixedReg   The 8-bit GREG index to store into; the caller has
 *                      already OR'ed in any REX.B extension.
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_STORE_GREG_U8_CONST(iFixedReg, u8Imm);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7922
7923
/**
 * @opcode 0xb0
 * mov AL,Ib - REX.B (uRexB) extends the target to R8L.
 */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
7932
7933
7934/**
7935 * @opcode 0xb1
7936 */
7937FNIEMOP_DEF(iemOp_CL_Ib)
7938{
7939 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
7940 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7941}
7942
7943
/**
 * @opcode 0xb2
 * mov DL,Ib - REX.B (uRexB) extends the target to R10L.
 */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
7952
7953
/**
 * @opcode 0xb3
 * mov BL,Ib - REX.B (uRexB) extends the target to R11L.
 */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
7962
7963
/**
 * @opcode 0xb4
 * mov AH,Ib - GREG index 4 (X86_GREG_xSP) encodes AH without a REX prefix;
 * with REX it selects SPL (or R12L when REX.B is set).
 */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
7972
7973
/**
 * @opcode 0xb5
 * mov CH,Ib - GREG index 5 (X86_GREG_xBP) encodes CH without a REX prefix;
 * with REX it selects BPL (or R13L when REX.B is set).
 */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
7982
7983
/**
 * @opcode 0xb6
 * mov DH,Ib - GREG index 6 (X86_GREG_xSI) encodes DH without a REX prefix;
 * with REX it selects SIL (or R14L when REX.B is set).
 */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
7992
7993
/**
 * @opcode 0xb7
 * mov BH,Ib - GREG index 7 (X86_GREG_xDI) encodes BH without a REX prefix;
 * with REX it selects DIL (or R15L when REX.B is set).
 */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
8002
8003
/**
 * Common 'mov regX,immX' helper.
 *
 * Stores an operand-size immediate into the given fixed register.  Note
 * that the 64-bit form takes a full 64-bit immediate (the only instruction
 * that does), not a sign-extended imm32.
 *
 * @param   iFixedReg   The GREG index to store into; the caller has already
 *                      OR'ed in any REX.B extension.
 */
FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U16_CONST(iFixedReg, u16Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U32_CONST(iFixedReg, u32Imm); /* implicitly zero-extends to 64 bits */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64_CONST(iFixedReg, u64Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8040
8041
/**
 * @opcode 0xb8
 * mov rAX,Iv - REX.B (uRexB) extends the target to r8.
 */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
8050
8051
/**
 * @opcode 0xb9
 * mov rCX,Iv - REX.B (uRexB) extends the target to r9.
 */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
8060
8061
/**
 * @opcode 0xba
 * mov rDX,Iv - REX.B (uRexB) extends the target to r10.
 */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
8070
8071
/**
 * @opcode 0xbb
 * mov rBX,Iv - REX.B (uRexB) extends the target to r11.
 */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
8080
8081
/**
 * @opcode 0xbc
 * mov rSP,Iv - REX.B (uRexB) extends the target to r12.
 */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
8090
8091
/**
 * @opcode 0xbd
 * mov rBP,Iv - REX.B (uRexB) extends the target to r13.
 */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
8100
8101
/**
 * @opcode 0xbe
 * mov rSI,Iv - REX.B (uRexB) extends the target to r14.
 */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
8110
8111
/**
 * @opcode 0xbf
 * mov rDI,Iv - REX.B (uRexB) extends the target to r15.
 */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
8120
8121
/**
 * @opcode 0xc0
 *
 * Group 2: rotate/shift Eb by an imm8 count (186 and later).  The ModR/M
 * reg field selects the operation; /6 is undefined and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,       pu8Dst,            0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *,      pEFlags,           2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_186, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* The trailing '1' accounts for the one imm8 byte following ModR/M. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);

        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_LOCAL(uint8_t, bUnmapInfo);
        IEM_MC_ARG(uint8_t *,   pu8Dst,    0);
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

        IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8184
8185
/**
 * @opcode 0xc1
 *
 * Group 2: rotate/shift Ev by an imm8 count (186 and later).  The ModR/M
 * reg field selects the operation; /6 is undefined and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* The trailing '1' accounts for the one imm8 byte following ModR/M. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);

                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint16_t *,  pu16Dst,   0);
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);

                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint32_t *,  pu32Dst,   0);
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);

                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint64_t *,  pu64Dst,   0);
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8335
8336
/**
 * @opcode 0xc2
 *
 * retn Iw - near return, popping Iw extra bytes off the stack afterwards.
 * Deferred to per-operand-size C implementations.
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_iw_16, u16Imm);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_iw_32, u16Imm);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_iw_64, u16Imm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8357
8358
/**
 * @opcode 0xc3
 *
 * retn - plain near return.  Deferred to per-operand-size C implementations.
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_16);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_32);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_64);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8378
8379
/**
 * @opcode 0xc4
 *
 * Either 'les Gv,Mp' or the 3-byte VEX prefix, depending on mode and MOD.
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LES instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode.  VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm) )
    {
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            if ((bVex2 & 0x80 /* VEX.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* VEX.R/X/B and VEX.vvvv are stored inverted in the prefix bytes. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;

            /* The low five bits of the first payload byte select the opcode map. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        Log(("VEX3: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
8449
8450
/**
 * @opcode 0xc5
 *
 * Either 'lds Gv,Mp' or the 2-byte VEX prefix, depending on mode and MOD.
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* VEX.R and VEX.vvvv are stored inverted in the payload byte. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bRm & 0x3;

            /* The 2-byte VEX form always implies the 0x0f opcode map. */
#ifdef IEM_WITH_VEX
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
8494
8495
/**
 * @opcode 0xc6
 *
 * Group 11: only /0 (mov Eb,Ib) is defined; other reg values raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* The trailing '1' accounts for the one imm8 byte following ModR/M. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8529
8530
/**
 * @opcode 0xc7
 *
 * Group 11: only /0 (mov Ev,Iz) is defined; other reg values raise \#UD.
 * In 64-bit mode the immediate is a sign-extended imm32.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* sign-extended imm32 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access.  The immediate size (2 or 4 bytes) is passed to the
           effective address calculation for RIP-relative addressing. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* sign-extended imm32 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8618
8619
8620
8621
/**
 * @opcode 0xc8
 *
 * enter Iw,Ib - set up a stack frame (186 and later).  Deferred to the C
 * implementation; rSP and rBP shadows are flushed as the C code updates both.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(0,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
                                iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
8638
8639
/**
 * @opcode 0xc9
 *
 * leave - tear down a stack frame (186 and later).  Deferred to the C
 * implementation; rSP and rBP shadows are flushed as the C code updates both.
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(0,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
                                iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
8654
8655
/**
 * @opcode 0xca
 *
 * retf Iw - far return, popping Iw extra bytes afterwards.  A far return can
 * change the CPU mode, so all data segment register shadows (selector, base
 * and limit) are flushed before deferring to the C implementation.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS),
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
8680
8681
/**
 * @opcode 0xcb
 *
 * retf - plain far return (cbPop = 0).  Same shadow-flush considerations as
 * retf Iw above: a far return can change the CPU mode.
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS),
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
8705
8706
8707/**
8708 * @opcode 0xcc
8709 */
8710FNIEMOP_DEF(iemOp_int3)
8711{
8712 IEMOP_MNEMONIC(int3, "int3");
8713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8714 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8715 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
8716 iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
8717}
8718
8719
8720/**
8721 * @opcode 0xcd
8722 */
8723FNIEMOP_DEF(iemOp_int_Ib)
8724{
8725 IEMOP_MNEMONIC(int_Ib, "int Ib");
8726 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
8727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8728 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8729 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS, UINT64_MAX,
8730 iemCImpl_int, u8Int, IEMINT_INTN);
8731 /** @todo make task-switches, ring-switches, ++ return non-zero status */
8732}
8733
8734
8735/**
8736 * @opcode 0xce
8737 */
8738FNIEMOP_DEF(iemOp_into)
8739{
8740 IEMOP_MNEMONIC(into, "into");
8741 IEMOP_HLP_NO_64BIT();
8742 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8743 | IEM_CIMPL_F_BRANCH_CONDITIONAL | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
8744 UINT64_MAX,
8745 iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
8746 /** @todo make task-switches, ring-switches, ++ return non-zero status */
8747}
8748
8749
8750/**
8751 * @opcode 0xcf
8752 */
8753FNIEMOP_DEF(iemOp_iret)
8754{
8755 IEMOP_MNEMONIC(iret, "iret");
8756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8757 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8758 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
8759 RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
8760 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
8761 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
8762 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
8763 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
8764 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
8765 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8766 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8767 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8768 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
8769 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
8770 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS),
8771 iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
8772 /* Segment registers are sanitized when returning to an outer ring, or fully
8773 reloaded when returning to v86 mode. Thus the large flush list above. */
8774}
8775
8776
8777/**
8778 * @opcode 0xd0
8779 */
8780FNIEMOP_DEF(iemOp_Grp2_Eb_1)
8781{
8782 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8783 PCIEMOPSHIFTSIZES pImpl;
8784 switch (IEM_GET_MODRM_REG_8(bRm))
8785 {
8786 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
8787 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
8788 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
8789 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
8790 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
8791 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
8792 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
8793 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8794 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
8795 }
8796 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8797
8798 if (IEM_IS_MODRM_REG_MODE(bRm))
8799 {
8800 /* register */
8801 IEM_MC_BEGIN(3, 0, 0, 0);
8802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8803 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8804 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
8805 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8806 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8807 IEM_MC_REF_EFLAGS(pEFlags);
8808 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8809 IEM_MC_ADVANCE_RIP_AND_FINISH();
8810 IEM_MC_END();
8811 }
8812 else
8813 {
8814 /* memory */
8815 IEM_MC_BEGIN(3, 3, 0, 0);
8816 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8817 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
8818 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8819 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8820 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8821
8822 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8824 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8825 IEM_MC_FETCH_EFLAGS(EFlags);
8826 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8827
8828 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
8829 IEM_MC_COMMIT_EFLAGS(EFlags);
8830 IEM_MC_ADVANCE_RIP_AND_FINISH();
8831 IEM_MC_END();
8832 }
8833}
8834
8835
8836
8837/**
8838 * @opcode 0xd1
8839 */
8840FNIEMOP_DEF(iemOp_Grp2_Ev_1)
8841{
8842 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8843 PCIEMOPSHIFTSIZES pImpl;
8844 switch (IEM_GET_MODRM_REG_8(bRm))
8845 {
8846 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
8847 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
8848 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
8849 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
8850 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
8851 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
8852 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
8853 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8854 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
8855 }
8856 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8857
8858 if (IEM_IS_MODRM_REG_MODE(bRm))
8859 {
8860 /* register */
8861 switch (pVCpu->iem.s.enmEffOpSize)
8862 {
8863 case IEMMODE_16BIT:
8864 IEM_MC_BEGIN(3, 0, 0, 0);
8865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8866 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8867 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8868 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8869 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8870 IEM_MC_REF_EFLAGS(pEFlags);
8871 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8872 IEM_MC_ADVANCE_RIP_AND_FINISH();
8873 IEM_MC_END();
8874 break;
8875
8876 case IEMMODE_32BIT:
8877 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
8878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8879 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8880 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8881 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8882 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8883 IEM_MC_REF_EFLAGS(pEFlags);
8884 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8885 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
8886 IEM_MC_ADVANCE_RIP_AND_FINISH();
8887 IEM_MC_END();
8888 break;
8889
8890 case IEMMODE_64BIT:
8891 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
8892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8893 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8894 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8895 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8896 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8897 IEM_MC_REF_EFLAGS(pEFlags);
8898 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8899 IEM_MC_ADVANCE_RIP_AND_FINISH();
8900 IEM_MC_END();
8901 break;
8902
8903 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8904 }
8905 }
8906 else
8907 {
8908 /* memory */
8909 switch (pVCpu->iem.s.enmEffOpSize)
8910 {
8911 case IEMMODE_16BIT:
8912 IEM_MC_BEGIN(3, 3, 0, 0);
8913 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8914 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8915 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8916 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8917 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8918
8919 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8920 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8921 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8922 IEM_MC_FETCH_EFLAGS(EFlags);
8923 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8924
8925 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
8926 IEM_MC_COMMIT_EFLAGS(EFlags);
8927 IEM_MC_ADVANCE_RIP_AND_FINISH();
8928 IEM_MC_END();
8929 break;
8930
8931 case IEMMODE_32BIT:
8932 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
8933 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8934 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8935 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8936 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8937 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8938
8939 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8940 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8941 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8942 IEM_MC_FETCH_EFLAGS(EFlags);
8943 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8944
8945 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
8946 IEM_MC_COMMIT_EFLAGS(EFlags);
8947 IEM_MC_ADVANCE_RIP_AND_FINISH();
8948 IEM_MC_END();
8949 break;
8950
8951 case IEMMODE_64BIT:
8952 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
8953 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8954 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8955 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8956 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8957 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8958
8959 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8961 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8962 IEM_MC_FETCH_EFLAGS(EFlags);
8963 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8964
8965 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
8966 IEM_MC_COMMIT_EFLAGS(EFlags);
8967 IEM_MC_ADVANCE_RIP_AND_FINISH();
8968 IEM_MC_END();
8969 break;
8970
8971 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8972 }
8973 }
8974}
8975
8976
8977/**
8978 * @opcode 0xd2
8979 */
8980FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
8981{
8982 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8983 PCIEMOPSHIFTSIZES pImpl;
8984 switch (IEM_GET_MODRM_REG_8(bRm))
8985 {
8986 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
8987 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
8988 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
8989 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
8990 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
8991 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
8992 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
8993 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8994 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
8995 }
8996 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8997
8998 if (IEM_IS_MODRM_REG_MODE(bRm))
8999 {
9000 /* register */
9001 IEM_MC_BEGIN(3, 0, 0, 0);
9002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9003 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9004 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9005 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9006 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9007 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9008 IEM_MC_REF_EFLAGS(pEFlags);
9009 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
9010 IEM_MC_ADVANCE_RIP_AND_FINISH();
9011 IEM_MC_END();
9012 }
9013 else
9014 {
9015 /* memory */
9016 IEM_MC_BEGIN(3, 3, 0, 0);
9017 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9018 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9019 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9020 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9021 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9022
9023 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9025 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9026 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9027 IEM_MC_FETCH_EFLAGS(EFlags);
9028 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
9029
9030 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
9031 IEM_MC_COMMIT_EFLAGS(EFlags);
9032 IEM_MC_ADVANCE_RIP_AND_FINISH();
9033 IEM_MC_END();
9034 }
9035}
9036
9037
9038/**
9039 * @opcode 0xd3
9040 */
9041FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
9042{
9043 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9044 PCIEMOPSHIFTSIZES pImpl;
9045 switch (IEM_GET_MODRM_REG_8(bRm))
9046 {
9047 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
9048 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
9049 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
9050 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
9051 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
9052 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
9053 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
9054 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9055 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
9056 }
9057 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
9058
9059 if (IEM_IS_MODRM_REG_MODE(bRm))
9060 {
9061 /* register */
9062 switch (pVCpu->iem.s.enmEffOpSize)
9063 {
9064 case IEMMODE_16BIT:
9065 IEM_MC_BEGIN(3, 0, 0, 0);
9066 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9067 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9068 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9069 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9070 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9071 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9072 IEM_MC_REF_EFLAGS(pEFlags);
9073 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
9074 IEM_MC_ADVANCE_RIP_AND_FINISH();
9075 IEM_MC_END();
9076 break;
9077
9078 case IEMMODE_32BIT:
9079 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
9080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9081 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9082 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9083 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9084 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9085 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9086 IEM_MC_REF_EFLAGS(pEFlags);
9087 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
9088 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
9089 IEM_MC_ADVANCE_RIP_AND_FINISH();
9090 IEM_MC_END();
9091 break;
9092
9093 case IEMMODE_64BIT:
9094 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
9095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9096 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9097 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9098 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9099 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9100 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9101 IEM_MC_REF_EFLAGS(pEFlags);
9102 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
9103 IEM_MC_ADVANCE_RIP_AND_FINISH();
9104 IEM_MC_END();
9105 break;
9106
9107 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9108 }
9109 }
9110 else
9111 {
9112 /* memory */
9113 switch (pVCpu->iem.s.enmEffOpSize)
9114 {
9115 case IEMMODE_16BIT:
9116 IEM_MC_BEGIN(3, 3, 0, 0);
9117 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9118 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9119 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9120 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9121 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9122
9123 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9125 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9126 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9127 IEM_MC_FETCH_EFLAGS(EFlags);
9128 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
9129
9130 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
9131 IEM_MC_COMMIT_EFLAGS(EFlags);
9132 IEM_MC_ADVANCE_RIP_AND_FINISH();
9133 IEM_MC_END();
9134 break;
9135
9136 case IEMMODE_32BIT:
9137 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
9138 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9139 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9140 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9141 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9142 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9143
9144 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9146 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9147 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9148 IEM_MC_FETCH_EFLAGS(EFlags);
9149 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
9150
9151 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
9152 IEM_MC_COMMIT_EFLAGS(EFlags);
9153 IEM_MC_ADVANCE_RIP_AND_FINISH();
9154 IEM_MC_END();
9155 break;
9156
9157 case IEMMODE_64BIT:
9158 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
9159 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9160 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9161 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9162 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9163 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9164
9165 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9167 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9168 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9169 IEM_MC_FETCH_EFLAGS(EFlags);
9170 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
9171
9172 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
9173 IEM_MC_COMMIT_EFLAGS(EFlags);
9174 IEM_MC_ADVANCE_RIP_AND_FINISH();
9175 IEM_MC_END();
9176 break;
9177
9178 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9179 }
9180 }
9181}
9182
9183/**
9184 * @opcode 0xd4
9185 */
9186FNIEMOP_DEF(iemOp_aam_Ib)
9187{
9188 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
9189 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
9190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9191 IEMOP_HLP_NO_64BIT();
9192 if (!bImm)
9193 IEMOP_RAISE_DIVIDE_ERROR_RET();
9194 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aam, bImm);
9195}
9196
9197
9198/**
9199 * @opcode 0xd5
9200 */
9201FNIEMOP_DEF(iemOp_aad_Ib)
9202{
9203 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
9204 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
9205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9206 IEMOP_HLP_NO_64BIT();
9207 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aad, bImm);
9208}
9209
9210
9211/**
9212 * @opcode 0xd6
9213 */
9214FNIEMOP_DEF(iemOp_salc)
9215{
9216 IEMOP_MNEMONIC(salc, "salc");
9217 IEMOP_HLP_NO_64BIT();
9218
9219 IEM_MC_BEGIN(0, 0, 0, 0);
9220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9221 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
9222 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
9223 } IEM_MC_ELSE() {
9224 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
9225 } IEM_MC_ENDIF();
9226 IEM_MC_ADVANCE_RIP_AND_FINISH();
9227 IEM_MC_END();
9228}
9229
9230
9231/**
9232 * @opcode 0xd7
9233 */
9234FNIEMOP_DEF(iemOp_xlat)
9235{
9236 IEMOP_MNEMONIC(xlat, "xlat");
9237 switch (pVCpu->iem.s.enmEffAddrMode)
9238 {
9239 case IEMMODE_16BIT:
9240 IEM_MC_BEGIN(2, 0, 0, 0);
9241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9242 IEM_MC_LOCAL(uint8_t, u8Tmp);
9243 IEM_MC_LOCAL(uint16_t, u16Addr);
9244 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
9245 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
9246 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
9247 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9248 IEM_MC_ADVANCE_RIP_AND_FINISH();
9249 IEM_MC_END();
9250 break;
9251
9252 case IEMMODE_32BIT:
9253 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
9254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9255 IEM_MC_LOCAL(uint8_t, u8Tmp);
9256 IEM_MC_LOCAL(uint32_t, u32Addr);
9257 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
9258 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
9259 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
9260 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9261 IEM_MC_ADVANCE_RIP_AND_FINISH();
9262 IEM_MC_END();
9263 break;
9264
9265 case IEMMODE_64BIT:
9266 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
9267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9268 IEM_MC_LOCAL(uint8_t, u8Tmp);
9269 IEM_MC_LOCAL(uint64_t, u64Addr);
9270 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
9271 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
9272 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
9273 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9274 IEM_MC_ADVANCE_RIP_AND_FINISH();
9275 IEM_MC_END();
9276 break;
9277
9278 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9279 }
9280}
9281
9282
9283/**
9284 * Common worker for FPU instructions working on ST0 and STn, and storing the
9285 * result in ST0.
9286 *
9287 * @param bRm Mod R/M byte.
9288 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9289 */
9290FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
9291{
9292 IEM_MC_BEGIN(3, 1, 0, 0);
9293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9294 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9295 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9296 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9297 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9298
9299 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9300 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9301 IEM_MC_PREPARE_FPU_USAGE();
9302 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9303 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
9304 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9305 } IEM_MC_ELSE() {
9306 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9307 } IEM_MC_ENDIF();
9308 IEM_MC_ADVANCE_RIP_AND_FINISH();
9309
9310 IEM_MC_END();
9311}
9312
9313
9314/**
9315 * Common worker for FPU instructions working on ST0 and STn, and only affecting
9316 * flags.
9317 *
9318 * @param bRm Mod R/M byte.
9319 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9320 */
9321FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
9322{
9323 IEM_MC_BEGIN(3, 1, 0, 0);
9324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9325 IEM_MC_LOCAL(uint16_t, u16Fsw);
9326 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9327 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9328 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9329
9330 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9331 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9332 IEM_MC_PREPARE_FPU_USAGE();
9333 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9334 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
9335 IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
9336 } IEM_MC_ELSE() {
9337 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
9338 } IEM_MC_ENDIF();
9339 IEM_MC_ADVANCE_RIP_AND_FINISH();
9340
9341 IEM_MC_END();
9342}
9343
9344
9345/**
9346 * Common worker for FPU instructions working on ST0 and STn, only affecting
9347 * flags, and popping when done.
9348 *
9349 * @param bRm Mod R/M byte.
9350 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9351 */
9352FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
9353{
9354 IEM_MC_BEGIN(3, 1, 0, 0);
9355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9356 IEM_MC_LOCAL(uint16_t, u16Fsw);
9357 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9358 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9359 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9360
9361 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9362 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9363 IEM_MC_PREPARE_FPU_USAGE();
9364 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9365 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
9366 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
9367 } IEM_MC_ELSE() {
9368 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
9369 } IEM_MC_ENDIF();
9370 IEM_MC_ADVANCE_RIP_AND_FINISH();
9371
9372 IEM_MC_END();
9373}
9374
9375
/** Opcode 0xd8 11/0.  fadd st0,stN - add STn to ST0, result in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
9382
9383
/** Opcode 0xd8 11/1.  fmul st0,stN - multiply ST0 by STn, result in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
9390
9391
/** Opcode 0xd8 11/2.  fcom st0,stN - compare ST0 with STn, flags only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
9398
9399
/** Opcode 0xd8 11/3.  fcomp st0,stN - like fcom but pops ST0 afterwards
 *  (same assembly worker, pop variant of the helper). */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
9406
9407
/** Opcode 0xd8 11/4.  fsub st0,stN - ST0 = ST0 - STn. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
9414
9415
/** Opcode 0xd8 11/5.  fsubr st0,stN - reversed subtract, ST0 = STn - ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
9422
9423
/** Opcode 0xd8 11/6.  fdiv st0,stN - ST0 = ST0 / STn. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
9430
9431
/** Opcode 0xd8 11/7.  fdivr st0,stN - reversed divide, ST0 = STn / ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
9438
9439
9440/**
9441 * Common worker for FPU instructions working on ST0 and an m32r, and storing
9442 * the result in ST0.
9443 *
9444 * @param bRm Mod R/M byte.
9445 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9446 */
9447FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
9448{
9449 IEM_MC_BEGIN(3, 3, 0, 0);
9450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9451 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9452 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
9453 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9454 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9455 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
9456
9457 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9459
9460 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9461 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9462 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9463
9464 IEM_MC_PREPARE_FPU_USAGE();
9465 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
9466 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
9467 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9468 } IEM_MC_ELSE() {
9469 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9470 } IEM_MC_ENDIF();
9471 IEM_MC_ADVANCE_RIP_AND_FINISH();
9472
9473 IEM_MC_END();
9474}
9475
9476
/** Opcode 0xd8 !11/0.  fadd st0,m32r - add a 32-bit float in memory to ST0. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
9483
9484
/** Opcode 0xd8 !11/1.  fmul st0,m32r - multiply ST0 by a 32-bit float in memory. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
9491
9492
/** Opcode 0xd8 !11/2.  fcom st0,m32r - compare ST0 with a 32-bit float in
 *  memory; only FSW is updated (with the memory operand recorded for FDP). */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9524
9525
/** Opcode 0xd8 !11/3.  fcomp st0,m32r - like fcom st0,m32r but pops ST0
 *  afterwards (THEN_POP variants on both success and underflow paths). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9557
9558
9559/** Opcode 0xd8 !11/4. */
9560FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
9561{
9562 IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
9563 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
9564}
9565
9566
9567/** Opcode 0xd8 !11/5. */
9568FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
9569{
9570 IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
9571 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
9572}
9573
9574
9575/** Opcode 0xd8 !11/6. */
9576FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
9577{
9578 IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
9579 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
9580}
9581
9582
9583/** Opcode 0xd8 !11/7. */
9584FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
9585{
9586 IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
9587 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
9588}
9589
9590
9591/**
9592 * @opcode 0xd8
9593 */
9594FNIEMOP_DEF(iemOp_EscF0)
9595{
9596 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9597 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
9598
9599 if (IEM_IS_MODRM_REG_MODE(bRm))
9600 {
9601 switch (IEM_GET_MODRM_REG_8(bRm))
9602 {
9603 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
9604 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
9605 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
9606 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
9607 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
9608 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
9609 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
9610 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
9611 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9612 }
9613 }
9614 else
9615 {
9616 switch (IEM_GET_MODRM_REG_8(bRm))
9617 {
9618 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
9619 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
9620 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
9621 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
9622 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
9623 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
9624 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
9625 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
9626 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9627 }
9628 }
9629}
9630
9631
9632/** Opcode 0xd9 /0 mem32real
9633 * @sa iemOp_fld_m64r */
9634FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
9635{
9636 IEMOP_MNEMONIC(fld_m32r, "fld m32r");
9637
9638 IEM_MC_BEGIN(2, 3, 0, 0);
9639 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9640 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9641 IEM_MC_LOCAL(RTFLOAT32U, r32Val);
9642 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9643 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);
9644
9645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9647
9648 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9649 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9650 IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9651 IEM_MC_PREPARE_FPU_USAGE();
9652 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
9653 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
9654 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
9655 } IEM_MC_ELSE() {
9656 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
9657 } IEM_MC_ENDIF();
9658 IEM_MC_ADVANCE_RIP_AND_FINISH();
9659
9660 IEM_MC_END();
9661}
9662
9663
9664/** Opcode 0xd9 !11/2 mem32real */
9665FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
9666{
9667 IEMOP_MNEMONIC(fst_m32r, "fst m32r");
9668 IEM_MC_BEGIN(3, 2, 0, 0);
9669 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9670 IEM_MC_LOCAL(uint16_t, u16Fsw);
9671 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9672 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
9673 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
9674
9675 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9677 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9678 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9679
9680 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
9681 IEM_MC_PREPARE_FPU_USAGE();
9682 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
9683 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
9684 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
9685 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
9686 } IEM_MC_ELSE() {
9687 IEM_MC_IF_FCW_IM() {
9688 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
9689 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
9690 } IEM_MC_ENDIF();
9691 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
9692 } IEM_MC_ENDIF();
9693 IEM_MC_ADVANCE_RIP_AND_FINISH();
9694
9695 IEM_MC_END();
9696}
9697
9698
9699/** Opcode 0xd9 !11/3 */
9700FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
9701{
9702 IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
9703 IEM_MC_BEGIN(3, 2, 0, 0);
9704 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9705 IEM_MC_LOCAL(uint16_t, u16Fsw);
9706 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9707 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
9708 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
9709
9710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9712 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9713 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9714
9715 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
9716 IEM_MC_PREPARE_FPU_USAGE();
9717 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
9718 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
9719 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
9720 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
9721 } IEM_MC_ELSE() {
9722 IEM_MC_IF_FCW_IM() {
9723 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
9724 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
9725 } IEM_MC_ENDIF();
9726 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
9727 } IEM_MC_ENDIF();
9728 IEM_MC_ADVANCE_RIP_AND_FINISH();
9729
9730 IEM_MC_END();
9731}
9732
9733
9734/** Opcode 0xd9 !11/4 */
9735FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
9736{
9737 IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
9738 IEM_MC_BEGIN(3, 0, 0, 0);
9739 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
9740 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9741
9742 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9743 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9744 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9745
9746 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
9747 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
9748 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
9749 IEM_MC_END();
9750}
9751
9752
9753/** Opcode 0xd9 !11/5 */
9754FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
9755{
9756 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
9757 IEM_MC_BEGIN(1, 1, 0, 0);
9758 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9759 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9760
9761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9762 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9763 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9764
9765 IEM_MC_ARG(uint16_t, u16Fsw, 0);
9766 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9767
9768 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, 0, iemCImpl_fldcw, u16Fsw);
9769 IEM_MC_END();
9770}
9771
9772
9773/** Opcode 0xd9 !11/6 */
9774FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
9775{
9776 IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
9777 IEM_MC_BEGIN(3, 0, 0, 0);
9778 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
9779 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9780
9781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9782 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9783 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9784
9785 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
9786 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
9787 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
9788 IEM_MC_END();
9789}
9790
9791
9792/** Opcode 0xd9 !11/7 */
9793FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
9794{
9795 IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
9796 IEM_MC_BEGIN(2, 0, 0, 0);
9797 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9798 IEM_MC_LOCAL(uint16_t, u16Fcw);
9799 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9801 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9802 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9803 IEM_MC_FETCH_FCW(u16Fcw);
9804 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
9805 IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
9806 IEM_MC_END();
9807}
9808
9809
9810/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?. */
9811FNIEMOP_DEF(iemOp_fnop)
9812{
9813 IEMOP_MNEMONIC(fnop, "fnop");
9814 IEM_MC_BEGIN(0, 0, 0, 0);
9815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9816 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9817 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9818 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9819 /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
9820 * intel optimizations. Investigate. */
9821 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
9822 IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
9823 IEM_MC_END();
9824}
9825
9826
9827/** Opcode 0xd9 11/0 stN */
9828FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
9829{
9830 IEMOP_MNEMONIC(fld_stN, "fld stN");
9831 /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
9832 * indicates that it does. */
9833 IEM_MC_BEGIN(0, 2, 0, 0);
9834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9835 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
9836 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9837 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9838 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9839
9840 IEM_MC_PREPARE_FPU_USAGE();
9841 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
9842 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
9843 IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
9844 } IEM_MC_ELSE() {
9845 IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
9846 } IEM_MC_ENDIF();
9847
9848 IEM_MC_ADVANCE_RIP_AND_FINISH();
9849 IEM_MC_END();
9850}
9851
9852
9853/** Opcode 0xd9 11/3 stN */
9854FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
9855{
9856 IEMOP_MNEMONIC(fxch_stN, "fxch stN");
9857 /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
9858 * indicates that it does. */
9859 IEM_MC_BEGIN(2, 3, 0, 0);
9860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9861 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
9862 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
9863 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9864 IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
9865 IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
9866 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9867 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9868
9869 IEM_MC_PREPARE_FPU_USAGE();
9870 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9871 IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
9872 IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
9873 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9874 } IEM_MC_ELSE() {
9875 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
9876 } IEM_MC_ENDIF();
9877
9878 IEM_MC_ADVANCE_RIP_AND_FINISH();
9879 IEM_MC_END();
9880}
9881
9882
9883/** Opcode 0xd9 11/4, 0xdd 11/2. */
9884FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
9885{
9886 IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
9887
9888 /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
9889 uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
9890 if (!iDstReg)
9891 {
9892 IEM_MC_BEGIN(0, 1, 0, 0);
9893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9894 IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
9895 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9896 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9897
9898 IEM_MC_PREPARE_FPU_USAGE();
9899 IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
9900 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
9901 } IEM_MC_ELSE() {
9902 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
9903 } IEM_MC_ENDIF();
9904
9905 IEM_MC_ADVANCE_RIP_AND_FINISH();
9906 IEM_MC_END();
9907 }
9908 else
9909 {
9910 IEM_MC_BEGIN(0, 2, 0, 0);
9911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9912 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
9913 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9914 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9915 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9916
9917 IEM_MC_PREPARE_FPU_USAGE();
9918 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
9919 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
9920 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
9921 } IEM_MC_ELSE() {
9922 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
9923 } IEM_MC_ENDIF();
9924
9925 IEM_MC_ADVANCE_RIP_AND_FINISH();
9926 IEM_MC_END();
9927 }
9928}
9929
9930
9931/**
9932 * Common worker for FPU instructions working on ST0 and replaces it with the
9933 * result, i.e. unary operators.
9934 *
9935 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9936 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Run the unary assembly worker on ST(0) and write the result back. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: record stack underflow instead of calling the worker. */
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9958
9959
9960/** Opcode 0xd9 0xe0. */
9961FNIEMOP_DEF(iemOp_fchs)
9962{
9963 IEMOP_MNEMONIC(fchs_st0, "fchs st0");
9964 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
9965}
9966
9967
9968/** Opcode 0xd9 0xe1. */
9969FNIEMOP_DEF(iemOp_fabs)
9970{
9971 IEMOP_MNEMONIC(fabs_st0, "fabs st0");
9972 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
9973}
9974
9975
9976/** Opcode 0xd9 0xe4. */
9977FNIEMOP_DEF(iemOp_ftst)
9978{
9979 IEMOP_MNEMONIC(ftst_st0, "ftst st0");
9980 IEM_MC_BEGIN(2, 1, 0, 0);
9981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9982 IEM_MC_LOCAL(uint16_t, u16Fsw);
9983 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9984 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
9985
9986 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9987 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9988 IEM_MC_PREPARE_FPU_USAGE();
9989 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
9990 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
9991 IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
9992 } IEM_MC_ELSE() {
9993 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
9994 } IEM_MC_ENDIF();
9995 IEM_MC_ADVANCE_RIP_AND_FINISH();
9996
9997 IEM_MC_END();
9998}
9999
10000
10001/** Opcode 0xd9 0xe5. */
10002FNIEMOP_DEF(iemOp_fxam)
10003{
10004 IEMOP_MNEMONIC(fxam_st0, "fxam st0");
10005 IEM_MC_BEGIN(2, 1, 0, 0);
10006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10007 IEM_MC_LOCAL(uint16_t, u16Fsw);
10008 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10009 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
10010
10011 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10012 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10013 IEM_MC_PREPARE_FPU_USAGE();
10014 IEM_MC_REF_FPUREG(pr80Value, 0);
10015 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
10016 IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
10017 IEM_MC_ADVANCE_RIP_AND_FINISH();
10018
10019 IEM_MC_END();
10020}
10021
10022
10023/**
10024 * Common worker for FPU instructions pushing a constant onto the FPU stack.
10025 *
10026 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10027 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEM_MC_BEGIN(1, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* The push destination is ST(7) relative to the current top. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Destination occupied: stack overflow handling instead of a push. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10048
10049
10050/** Opcode 0xd9 0xe8. */
10051FNIEMOP_DEF(iemOp_fld1)
10052{
10053 IEMOP_MNEMONIC(fld1, "fld1");
10054 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
10055}
10056
10057
10058/** Opcode 0xd9 0xe9. */
10059FNIEMOP_DEF(iemOp_fldl2t)
10060{
10061 IEMOP_MNEMONIC(fldl2t, "fldl2t");
10062 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
10063}
10064
10065
10066/** Opcode 0xd9 0xea. */
10067FNIEMOP_DEF(iemOp_fldl2e)
10068{
10069 IEMOP_MNEMONIC(fldl2e, "fldl2e");
10070 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
10071}
10072
10073/** Opcode 0xd9 0xeb. */
10074FNIEMOP_DEF(iemOp_fldpi)
10075{
10076 IEMOP_MNEMONIC(fldpi, "fldpi");
10077 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
10078}
10079
10080
10081/** Opcode 0xd9 0xec. */
10082FNIEMOP_DEF(iemOp_fldlg2)
10083{
10084 IEMOP_MNEMONIC(fldlg2, "fldlg2");
10085 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
10086}
10087
10088/** Opcode 0xd9 0xed. */
10089FNIEMOP_DEF(iemOp_fldln2)
10090{
10091 IEMOP_MNEMONIC(fldln2, "fldln2");
10092 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
10093}
10094
10095
10096/** Opcode 0xd9 0xee. */
10097FNIEMOP_DEF(iemOp_fldz)
10098{
10099 IEMOP_MNEMONIC(fldz, "fldz");
10100 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
10101}
10102
10103
/** Opcode 0xd9 0xf0.
 *
 * F2XM1: ST(0) = 2^ST(0) - 1.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
10117
10118
10119/**
10120 * Common worker for FPU instructions working on STn and ST0, storing the result
10121 * in STn, and popping the stack unless IE, DE or ZE was raised.
10122 *
10123 * @param bRm Mod R/M byte.
10124 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10125 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Operand 1 is ST(i) (also the destination), operand 2 is ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Either register empty: underflow handling, still popping. */
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10149
10150
10151/** Opcode 0xd9 0xf1. */
10152FNIEMOP_DEF(iemOp_fyl2x)
10153{
10154 IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
10155 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
10156}
10157
10158
10159/**
10160 * Common worker for FPU instructions working on ST0 and having two outputs, one
10161 * replacing ST0 and one pushed onto the stack.
10162 *
10163 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10164 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* The two-result worker replaces ST(0) and pushes a second value. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): two-result flavour of push-underflow handling. */
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10186
10187
10188/** Opcode 0xd9 0xf2. */
10189FNIEMOP_DEF(iemOp_fptan)
10190{
10191 IEMOP_MNEMONIC(fptan_st0, "fptan st0");
10192 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
10193}
10194
10195
10196/** Opcode 0xd9 0xf3. */
10197FNIEMOP_DEF(iemOp_fpatan)
10198{
10199 IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
10200 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
10201}
10202
10203
10204/** Opcode 0xd9 0xf4. */
10205FNIEMOP_DEF(iemOp_fxtract)
10206{
10207 IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
10208 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
10209}
10210
10211
10212/** Opcode 0xd9 0xf5. */
10213FNIEMOP_DEF(iemOp_fprem1)
10214{
10215 IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
10216 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
10217}
10218
10219
10220/** Opcode 0xd9 0xf6. */
10221FNIEMOP_DEF(iemOp_fdecstp)
10222{
10223 IEMOP_MNEMONIC(fdecstp, "fdecstp");
10224 /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
10225 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
10226 * FINCSTP and FDECSTP. */
10227 IEM_MC_BEGIN(0, 0, 0, 0);
10228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10229
10230 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10231 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10232
10233 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10234 IEM_MC_FPU_STACK_DEC_TOP();
10235 IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);
10236
10237 IEM_MC_ADVANCE_RIP_AND_FINISH();
10238 IEM_MC_END();
10239}
10240
10241
10242/** Opcode 0xd9 0xf7. */
10243FNIEMOP_DEF(iemOp_fincstp)
10244{
10245 IEMOP_MNEMONIC(fincstp, "fincstp");
10246 /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
10247 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
10248 * FINCSTP and FDECSTP. */
10249 IEM_MC_BEGIN(0, 0, 0, 0);
10250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10251
10252 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10253 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10254
10255 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10256 IEM_MC_FPU_STACK_INC_TOP();
10257 IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);
10258
10259 IEM_MC_ADVANCE_RIP_AND_FINISH();
10260 IEM_MC_END();
10261}
10262
10263
10264/** Opcode 0xd9 0xf8. */
10265FNIEMOP_DEF(iemOp_fprem)
10266{
10267 IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
10268 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
10269}
10270
10271
10272/** Opcode 0xd9 0xf9. */
10273FNIEMOP_DEF(iemOp_fyl2xp1)
10274{
10275 IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
10276 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
10277}
10278
10279
10280/** Opcode 0xd9 0xfa. */
10281FNIEMOP_DEF(iemOp_fsqrt)
10282{
10283 IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
10284 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
10285}
10286
10287
10288/** Opcode 0xd9 0xfb. */
10289FNIEMOP_DEF(iemOp_fsincos)
10290{
10291 IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
10292 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
10293}
10294
10295
10296/** Opcode 0xd9 0xfc. */
10297FNIEMOP_DEF(iemOp_frndint)
10298{
10299 IEMOP_MNEMONIC(frndint_st0, "frndint st0");
10300 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
10301}
10302
10303
10304/** Opcode 0xd9 0xfd. */
10305FNIEMOP_DEF(iemOp_fscale)
10306{
10307 IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
10308 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
10309}
10310
10311
10312/** Opcode 0xd9 0xfe. */
10313FNIEMOP_DEF(iemOp_fsin)
10314{
10315 IEMOP_MNEMONIC(fsin_st0, "fsin st0");
10316 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
10317}
10318
10319
10320/** Opcode 0xd9 0xff. */
10321FNIEMOP_DEF(iemOp_fcos)
10322{
10323 IEMOP_MNEMONIC(fcos_st0, "fcos st0");
10324 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
10325}
10326
10327
10328/** Used by iemOp_EscF1. */
10329IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
10330{
10331 /* 0xe0 */ iemOp_fchs,
10332 /* 0xe1 */ iemOp_fabs,
10333 /* 0xe2 */ iemOp_Invalid,
10334 /* 0xe3 */ iemOp_Invalid,
10335 /* 0xe4 */ iemOp_ftst,
10336 /* 0xe5 */ iemOp_fxam,
10337 /* 0xe6 */ iemOp_Invalid,
10338 /* 0xe7 */ iemOp_Invalid,
10339 /* 0xe8 */ iemOp_fld1,
10340 /* 0xe9 */ iemOp_fldl2t,
10341 /* 0xea */ iemOp_fldl2e,
10342 /* 0xeb */ iemOp_fldpi,
10343 /* 0xec */ iemOp_fldlg2,
10344 /* 0xed */ iemOp_fldln2,
10345 /* 0xee */ iemOp_fldz,
10346 /* 0xef */ iemOp_Invalid,
10347 /* 0xf0 */ iemOp_f2xm1,
10348 /* 0xf1 */ iemOp_fyl2x,
10349 /* 0xf2 */ iemOp_fptan,
10350 /* 0xf3 */ iemOp_fpatan,
10351 /* 0xf4 */ iemOp_fxtract,
10352 /* 0xf5 */ iemOp_fprem1,
10353 /* 0xf6 */ iemOp_fdecstp,
10354 /* 0xf7 */ iemOp_fincstp,
10355 /* 0xf8 */ iemOp_fprem,
10356 /* 0xf9 */ iemOp_fyl2xp1,
10357 /* 0xfa */ iemOp_fsqrt,
10358 /* 0xfb */ iemOp_fsincos,
10359 /* 0xfc */ iemOp_frndint,
10360 /* 0xfd */ iemOp_fscale,
10361 /* 0xfe */ iemOp_fsin,
10362 /* 0xff */ iemOp_fcos
10363};
10364
10365
10366/**
10367 * @opcode 0xd9
10368 */
10369FNIEMOP_DEF(iemOp_EscF1)
10370{
10371 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10372 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
10373
10374 if (IEM_IS_MODRM_REG_MODE(bRm))
10375 {
10376 switch (IEM_GET_MODRM_REG_8(bRm))
10377 {
10378 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
10379 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
10380 case 2:
10381 if (bRm == 0xd0)
10382 return FNIEMOP_CALL(iemOp_fnop);
10383 IEMOP_RAISE_INVALID_OPCODE_RET();
10384 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
10385 case 4:
10386 case 5:
10387 case 6:
10388 case 7:
10389 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
10390 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
10391 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10392 }
10393 }
10394 else
10395 {
10396 switch (IEM_GET_MODRM_REG_8(bRm))
10397 {
10398 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
10399 case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
10400 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
10401 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
10402 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
10403 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
10404 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
10405 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
10406 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10407 }
10408 }
10409}
10410
10411
10412/** Opcode 0xda 11/0. */
10413FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
10414{
10415 IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
10416 IEM_MC_BEGIN(0, 1, 0, 0);
10417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10418 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
10419
10420 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10421 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10422
10423 IEM_MC_PREPARE_FPU_USAGE();
10424 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
10425 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
10426 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
10427 } IEM_MC_ENDIF();
10428 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
10429 } IEM_MC_ELSE() {
10430 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10431 } IEM_MC_ENDIF();
10432 IEM_MC_ADVANCE_RIP_AND_FINISH();
10433
10434 IEM_MC_END();
10435}
10436
10437
10438/** Opcode 0xda 11/1. */
10439FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
10440{
10441 IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
10442 IEM_MC_BEGIN(0, 1, 0, 0);
10443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10444 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
10445
10446 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10447 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10448
10449 IEM_MC_PREPARE_FPU_USAGE();
10450 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
10451 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10452 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
10453 } IEM_MC_ENDIF();
10454 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
10455 } IEM_MC_ELSE() {
10456 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10457 } IEM_MC_ENDIF();
10458 IEM_MC_ADVANCE_RIP_AND_FINISH();
10459
10460 IEM_MC_END();
10461}
10462
10463
10464/** Opcode 0xda 11/2. */
10465FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
10466{
10467 IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
10468 IEM_MC_BEGIN(0, 1, 0, 0);
10469 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10470 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
10471
10472 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10473 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10474
10475 IEM_MC_PREPARE_FPU_USAGE();
10476 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
10477 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
10478 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
10479 } IEM_MC_ENDIF();
10480 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
10481 } IEM_MC_ELSE() {
10482 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10483 } IEM_MC_ENDIF();
10484 IEM_MC_ADVANCE_RIP_AND_FINISH();
10485
10486 IEM_MC_END();
10487}
10488
10489
10490/** Opcode 0xda 11/3. */
10491FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
10492{
10493 IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
10494 IEM_MC_BEGIN(0, 1, 0, 0);
10495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10496 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
10497
10498 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10499 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10500
10501 IEM_MC_PREPARE_FPU_USAGE();
10502 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
10503 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
10504 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
10505 } IEM_MC_ENDIF();
10506 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
10507 } IEM_MC_ELSE() {
10508 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10509 } IEM_MC_ENDIF();
10510 IEM_MC_ADVANCE_RIP_AND_FINISH();
10511
10512 IEM_MC_END();
10513}
10514
10515
/**
 * Common worker for FPU instructions working on ST0 and ST1, only affecting
 * flags, and popping twice when done.
 *
 * Used by FUCOMPP (and the like): the assembly worker only produces an FSW
 * value; no FPU register is written.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(1) must be non-empty, else stack underflow (which
       also pops twice to keep the stack consistent). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10545
10546
/** Opcode 0xda 0xe9. */
FNIEMOP_DEF(iemOp_fucompp)
{
    /* FUCOMPP: unordered compare of ST(0) with ST(1), pop both. */
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
10553
10554
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * Fetches the 32-bit signed integer operand from memory, calls the assembly
 * worker with ST(0) and the integer, and stores the result back into ST(0).
 *
 * @param bRm Mod R/M byte.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    /* Effective address must be decoded before the decoding-done check. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch may fault, so it happens before touching FPU state. */
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10590
10591
/** Opcode 0xda !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    /* FIADD m32int: ST(0) += (int32_t)[mem]. */
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}


/** Opcode 0xda !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    /* FIMUL m32int: ST(0) *= (int32_t)[mem]. */
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
10606
10607
/** Opcode 0xda !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    /* FICOM m32int: compare ST(0) with a 32-bit integer from memory; only
       the FSW condition codes are affected, nothing is stored. */
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* Memory-operand variant so FDP/FDS are recorded as well. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no destination register involved in the underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10639
10640
/** Opcode 0xda !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    /* FICOMP m32int: same as FICOM m32int, but pops ST(0) afterwards. */
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Underflow path also pops so the stack pointer stays consistent. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10672
10673
/** Opcode 0xda !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    /* FISUB m32int: ST(0) -= (int32_t)[mem]. */
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}


/** Opcode 0xda !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    /* FISUBR m32int: ST(0) = (int32_t)[mem] - ST(0) (reversed operands). */
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}


/** Opcode 0xda !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    /* FIDIV m32int: ST(0) /= (int32_t)[mem]. */
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}


/** Opcode 0xda !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    /* FIDIVR m32int: ST(0) = (int32_t)[mem] / ST(0) (reversed operands). */
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
10704
10705
/**
 * @opcode 0xda
 *
 * ESC/2 dispatcher: register forms are FCMOVB/E/BE/U and FUCOMPP (0xe9);
 * memory forms are the 32-bit-integer arithmetic/compare instructions.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of the escape byte + modrm)
       so workers can store it in FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5:
                /* Only the 0xe9 encoding (FUCOMPP) is valid in /5. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10747
10748
/** Opcode 0xdb !11/0. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    /* FILD m32int: convert a 32-bit signed integer from memory to R80 and
       push it onto the FPU stack. */
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push lands in ST(7) relative to the current top; it must be empty,
       otherwise this is a stack (push) overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10779
10780
/** Opcode 0xdb !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    /* FISTTP m32int (SSE3): store ST(0) to memory as int32 using truncation
       (ignores RC), then pop.  The destination is mapped for write up front
       so a masked invalid-operation can still store the integer indefinite. */
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit is conditional on FSW: an unmasked exception suppresses the store. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10814
10815
/** Opcode 0xdb !11/2. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    /* FIST m32int: store ST(0) to memory as int32 using the current rounding
       control; ST(0) is not popped. */
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit is conditional on FSW: an unmasked exception suppresses the store. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10849
10850
/** Opcode 0xdb !11/3. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    /* FISTP m32int: like FIST m32int but pops ST(0) afterwards. */
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit is conditional on FSW: an unmasked exception suppresses the store. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10884
10885
/** Opcode 0xdb !11/5. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    /* FLD m80real: load an 80-bit real from memory and push it. */
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push destination ST(7) must be empty, else push overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10916
10917
/** Opcode 0xdb !11/7. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    /* FSTP m80real: store ST(0) to memory as an 80-bit real, then pop. */
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* 10-byte operand needs the explicit-alignment mapping variant. */
    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        /* Commit is conditional on FSW: an unmasked exception suppresses the store. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with IM masked, store the real indefinite QNaN. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10951
10952
/** Opcode 0xdb 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    /* FCMOVNB ST(0),ST(i): copy ST(i) into ST(0) when CF is clear (not below). */
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10977
10978
/** Opcode 0xdb 11/1. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    /* FCMOVNE ST(0),ST(i): copy ST(i) into ST(0) when ZF is clear (not equal). */
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11003
11004
/** Opcode 0xdb 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    /* FCMOVNBE ST(0),ST(i): copy ST(i) into ST(0) when both CF and ZF are
       clear (not below or equal). */
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11029
11030
/** Opcode 0xdb 11/3.
 * @note Function name has a doubled 'n' (FCMOVNU in the SDM); kept for
 *       consistency with existing references to this symbol. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    /* FCMOVNU ST(0),ST(i): copy ST(i) into ST(0) when PF is clear (ordered). */
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11055
11056
/** Opcode 0xdb 0xe0. */
FNIEMOP_DEF(iemOp_fneni)
{
    /* FNENI: 8087 interrupt-enable; a no-op (ignored) on later FPUs.
       Still checks CR0.EM/TS so #NM is delivered correctly. */
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0xdb 0xe1. */
FNIEMOP_DEF(iemOp_fndisi)
{
    /* FNDISI: 8087 interrupt-disable; a no-op (ignored) on later FPUs. */
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11079
11080
/** Opcode 0xdb 0xe2. */
FNIEMOP_DEF(iemOp_fnclex)
{
    /* FNCLEX: clear the FSW exception flags without checking for pending
       exceptions first (hence no IEM_MC_MAYBE_RAISE_FPU_XCPT here). */
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0xdb 0xe3. */
FNIEMOP_DEF(iemOp_fninit)
{
    /* FNINIT: reinitialize the FPU; deferred to the C implementation with
       exception checking disabled (the no-wait form). */
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, 0, iemCImpl_finit, false /*fCheckXcpts*/);
}
11102
11103
/** Opcode 0xdb 0xe4. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    /* 80287-only; ignored (no-op) on later FPUs. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0xdb 0xe5. */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    /* Modelled after modern CPUs: raise #UD instead of the 80287XL no-op. */
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
11131
11132
/** Opcode 0xdb 11/5. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    /* FUCOMI ST0,ST(i): unordered compare setting ZF/PF/CF; shared C worker
       with FCOMI.  The pop flag (zero here) is OR'ed into the uFpuOpcode
       argument to stay within the 3-argument CIMPL limit. */
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11141
11142
11143/** Opcode 0xdb 11/6. */
11144FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
11145{
11146 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
11147 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
11148 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
11149 false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
11150}
11151
11152
/**
 * @opcode 0xdb
 *
 * ESC/3 dispatcher: register forms are FCMOVNcc, the /4 control group
 * (FNENI..FRSTPM), FUCOMI and FCOMI; memory forms are 32-bit-integer
 * loads/stores plus the 80-bit real load/store.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of the escape byte + modrm). */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* /4 selects individual control instructions by the full modrm. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
                    case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11204
11205
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * Register-only form (0xdc group): ST(i) = ST(i) <op> ST(0).
 *
 * @param bRm Mod R/M byte.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Operand order: ST(i) is the first (destination) operand, ST(0) second. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11236
11237
/** Opcode 0xdc 11/0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    /* FADD ST(i),ST(0). */
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xdc 11/1. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    /* FMUL ST(i),ST(0). */
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xdc 11/4. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    /* FSUBR ST(i),ST(0): ST(i) = ST(0) - ST(i).  Note that /4 and /5 swap
       sub/subr relative to the 0xd8 encodings. */
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xdc 11/5. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    /* FSUB ST(i),ST(0): ST(i) = ST(i) - ST(0). */
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xdc 11/6. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    /* FDIVR ST(i),ST(0): ST(i) = ST(0) / ST(i). */
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xdc 11/7. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    /* FDIV ST(i),ST(0): ST(i) = ST(i) / ST(0). */
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
11284
11285
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * Fetches the m64real operand, calls the assembly worker with ST(0) and the
 * fetched value, and stores the result back into ST(0).
 *
 * @param bRm Mod R/M byte.
 * @param pfnImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Fetch may fault, so it happens before touching FPU state. */
    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11320
11321
/** Opcode 0xdc !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    /* FADD m64real: ST(0) += [mem]. */
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}


/** Opcode 0xdc !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    /* FMUL m64real: ST(0) *= [mem]. */
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
11336
11337
/** Opcode 0xdc !11/2. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    /* FCOM m64real: compare ST(0) with a 64-bit real from memory; only FSW
       condition codes are affected. */
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no destination register involved in the underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11369
11370
/** Opcode 0xdc !11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    /* FCOMP m64real: same as FCOM m64real, but pops ST(0) afterwards. */
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Underflow path also pops so the stack pointer stays consistent. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11402
11403
/** Opcode 0xdc !11/4.
 * FSUB ST(0),m64r - ST(0) = ST(0) - 64-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
11410
11411
/** Opcode 0xdc !11/5.
 * FSUBR ST(0),m64r - ST(0) = 64-bit real from memory - ST(0) (reversed operands). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
11418
11419
/** Opcode 0xdc !11/6.
 * FDIV ST(0),m64r - ST(0) = ST(0) / 64-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
11426
11427
/** Opcode 0xdc !11/7.
 * FDIVR ST(0),m64r - ST(0) = 64-bit real from memory / ST(0) (reversed operands). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
11434
11435
11436/**
11437 * @opcode 0xdc
11438 */
11439FNIEMOP_DEF(iemOp_EscF4)
11440{
11441 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11442 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
11443 if (IEM_IS_MODRM_REG_MODE(bRm))
11444 {
11445 switch (IEM_GET_MODRM_REG_8(bRm))
11446 {
11447 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
11448 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
11449 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
11450 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
11451 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
11452 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
11453 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
11454 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
11455 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11456 }
11457 }
11458 else
11459 {
11460 switch (IEM_GET_MODRM_REG_8(bRm))
11461 {
11462 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
11463 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
11464 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
11465 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
11466 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
11467 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
11468 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
11469 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
11470 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11471 }
11472 }
11473}
11474
11475
/** Opcode 0xdd !11/0.
 * FLD m64r - push a 64-bit real from memory onto the FPU stack (converted to R80).
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 relative to TOP is the slot a push will land in; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11506
11507
/** Opcode 0xdd !11/1.
 * FISTTP m64i - store ST(0) to memory as a truncated (chopped) 64-bit integer
 * and pop the stack (SSE3). Note: the original comment said !11/0; the EscF5
 * dispatcher routes reg=1 here. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before querying the stack so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with #IA masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11541
11542
/** Opcode 0xdd !11/2.
 * FST m64r - store ST(0) to memory as a 64-bit real (no pop). Note: the
 * original comment said !11/0; the EscF5 dispatcher routes reg=2 here. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with #IA masked, store the real indefinite (negative QNaN). */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11576
11577
11578
11579
/** Opcode 0xdd !11/3.
 * FSTP m64r - store ST(0) to memory as a 64-bit real and pop. Identical to
 * iemOp_fst_m64r except for the THEN_POP FSW updates. Note: the original
 * comment said !11/0; the EscF5 dispatcher routes reg=3 here. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with #IA masked, store the real indefinite (negative QNaN). */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11613
11614
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte - restore the complete FPU state from memory; deferred to
 * a C implementation. Note: the original comment said !11/0; the EscF5
 * dispatcher routes reg=4 here. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    /* Operand size selects the 94-byte (16-bit) vs 108-byte (32-bit) layout. */
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
11632
11633
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte - save the complete FPU state to memory and reinitialize
 * the FPU; deferred to a C implementation. Note: the original comment said
 * !11/0; the EscF5 dispatcher routes reg=6 here. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */

    /* Operand size selects the 94-byte (16-bit) vs 108-byte (32-bit) layout. */
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
11651
/** Opcode 0xdd !11/7.
 * FNSTSW m16 - store the FPU status word to memory (no-wait form; does not
 * check for pending FPU exceptions first). Note: the original comment said
 * !11/0; the EscF5 dispatcher routes reg=7 here. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2, 0, 0);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 *        from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 *        NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
11675
11676
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - mark register ST(i) as empty in the tag word without changing
 * its contents or the stack top. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11696
11697
/** Opcode 0xdd 11/2.
 * FST ST(i) - copy ST(0) into ST(i). Note: the original comment said 11/1;
 * the EscF5 dispatcher routes reg=2 here. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Wrap the ST(0) value in a result (FSW=0) and store it into ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11720
11721
/** Opcode 0xdd 11/4.
 * FUCOM ST(0),ST(i) - unordered compare, sets C0/C2/C3 without storing a
 * result. Note: the original comment said 11/3; the EscF5 dispatcher routes
 * reg=4 here. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
11728
11729
/** Opcode 0xdd 11/5.
 * FUCOMP ST(0),ST(i) - unordered compare, then pop. Note: the original
 * comment said 11/4; the EscF5 dispatcher routes reg=5 here. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
11736
11737
11738/**
11739 * @opcode 0xdd
11740 */
11741FNIEMOP_DEF(iemOp_EscF5)
11742{
11743 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11744 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
11745 if (IEM_IS_MODRM_REG_MODE(bRm))
11746 {
11747 switch (IEM_GET_MODRM_REG_8(bRm))
11748 {
11749 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
11750 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
11751 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
11752 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
11753 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
11754 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
11755 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11756 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11757 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11758 }
11759 }
11760 else
11761 {
11762 switch (IEM_GET_MODRM_REG_8(bRm))
11763 {
11764 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
11765 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
11766 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
11767 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
11768 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
11769 case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
11770 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
11771 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
11772 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11773 }
11774 }
11775}
11776
11777
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0) - ST(i) = ST(i) + ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
11784
11785
/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0) - ST(i) = ST(i) * ST(0), then pop. Note: the original
 * comment said 11/0; the EscF6 dispatcher routes reg=1 here. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
11792
11793
/** Opcode 0xde 0xd9.
 * FCOMPP - compare ST(0) with ST(1), then pop both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
11800
11801
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0) - ST(i) = ST(0) - ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
11808
11809
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0) - ST(i) = ST(i) - ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
11816
11817
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0) - ST(i) = ST(0) / ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
11824
11825
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0) - ST(i) = ST(i) / ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
11832
11833
11834/**
11835 * Common worker for FPU instructions working on ST0 and an m16i, and storing
11836 * the result in ST0.
11837 *
11838 * @param bRm Mod R/M byte.
11839 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11840 */
11841FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
11842{
11843 IEM_MC_BEGIN(3, 3, 0, 0);
11844 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11845 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11846 IEM_MC_LOCAL(int16_t, i16Val2);
11847 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11848 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11849 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
11850
11851 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11853
11854 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11855 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11856 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11857
11858 IEM_MC_PREPARE_FPU_USAGE();
11859 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11860 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
11861 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
11862 } IEM_MC_ELSE() {
11863 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11864 } IEM_MC_ENDIF();
11865 IEM_MC_ADVANCE_RIP_AND_FINISH();
11866
11867 IEM_MC_END();
11868}
11869
11870
/** Opcode 0xde !11/0.
 * FIADD m16i - ST(0) = ST(0) + 16-bit signed integer from memory. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
11877
11878
/** Opcode 0xde !11/1.
 * FIMUL m16i - ST(0) = ST(0) * 16-bit signed integer from memory. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
11885
11886
/** Opcode 0xde !11/2.
 * FICOM ST(0),m16i - compare ST(0) with a 16-bit signed integer from memory,
 * setting C0/C2/C3 (no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11918
11919
/** Opcode 0xde !11/3.
 * FICOMP ST(0),m16i - compare ST(0) with a 16-bit signed integer from memory,
 * setting C0/C2/C3, then pop (note the THEN_POP updates vs iemOp_ficom_m16i). */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11951
11952
/** Opcode 0xde !11/4.
 * FISUB m16i - ST(0) = ST(0) - 16-bit signed integer from memory. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
11959
11960
/** Opcode 0xde !11/5.
 * FISUBR m16i - ST(0) = 16-bit signed integer from memory - ST(0) (reversed). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
11967
11968
/** Opcode 0xde !11/6.
 * FIDIV m16i - ST(0) = ST(0) / 16-bit signed integer from memory. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
11975
11976
/** Opcode 0xde !11/7.
 * FIDIVR m16i - ST(0) = 16-bit signed integer from memory / ST(0) (reversed). */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
11983
11984
11985/**
11986 * @opcode 0xde
11987 */
11988FNIEMOP_DEF(iemOp_EscF6)
11989{
11990 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11991 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
11992 if (IEM_IS_MODRM_REG_MODE(bRm))
11993 {
11994 switch (IEM_GET_MODRM_REG_8(bRm))
11995 {
11996 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
11997 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
11998 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
11999 case 3: if (bRm == 0xd9)
12000 return FNIEMOP_CALL(iemOp_fcompp);
12001 IEMOP_RAISE_INVALID_OPCODE_RET();
12002 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
12003 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
12004 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
12005 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
12006 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12007 }
12008 }
12009 else
12010 {
12011 switch (IEM_GET_MODRM_REG_8(bRm))
12012 {
12013 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
12014 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
12015 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
12016 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
12017 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
12018 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
12019 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
12020 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
12021 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12022 }
12023 }
12024}
12025
12026
/** Opcode 0xdf 11/0.
 * FFREEP ST(i) - undocumented instruction, assumed to work like FFREE
 * followed by FINCSTP (free the register, then increment TOP). */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12046
12047
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - store the FPU status word in AX (no-wait form; does not check
 * for pending FPU exceptions first). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12062
12063
12064/** Opcode 0xdf 11/5. */
12065FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
12066{
12067 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
12068 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
12069 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
12070 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
12071}
12072
12073
/** Opcode 0xdf 11/6.
 * FCOMIP ST(0),ST(i) - ordered compare setting ZF/PF/CF in EFLAGS, then pop.
 * fUCmp is false: an unordered comparison raises \#IA. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
12082
12083
/** Opcode 0xdf !11/0.
 * FILD m16i - push a 16-bit signed integer from memory onto the FPU stack
 * (converted to R80). */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 relative to TOP is the slot a push will land in; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12114
12115
/** Opcode 0xdf !11/1.
 * FISTTP m16i - store ST(0) to memory as a truncated (chopped) 16-bit integer
 * and pop the stack (SSE3). */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before querying the stack so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with #IA masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12149
12150
/** Opcode 0xdf !11/2.
 * FIST m16i - store ST(0) to memory as a 16-bit signed integer using the
 * current rounding mode (no pop). */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with #IA masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12184
12185
/** Opcode 0xdf !11/3.
 * FISTP m16i - store ST(0) to memory as a 16-bit signed integer using the
 * current rounding mode and pop (THEN_POP updates vs iemOp_fist_m16i). */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with #IA masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12219
12220
/** Opcode 0xdf !11/4.
 * FBLD m80bcd - push an 80-bit packed BCD value from memory onto the FPU
 * stack (converted to R80). */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 relative to TOP is the slot a push will land in; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12251
12252
/** Opcode 0xdf !11/5.
 * FILD m64i - push a 64-bit signed integer from memory onto the FPU stack
 * (converted to R80). */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 relative to TOP is the slot a push will land in; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12283
12284
12285/** Opcode 0xdf !11/6. */
12286FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
12287{
12288    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    /* FBSTP m80bcd: convert ST(0) to an 80-bit packed BCD value, store it to
       memory and pop the FPU register stack. */
12289    IEM_MC_BEGIN(3, 2, 0, 0);
12290    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12291    IEM_MC_LOCAL(uint16_t, u16Fsw);
12292    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12293    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
12294    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
12295
12296    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12297    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12298    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12299    IEM_MC_MAYBE_RAISE_FPU_XCPT();
12300
    /* The destination is mapped for writing up front so that a #PF/#GP is
       raised before any FPU state is modified. */
12301    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
12302    IEM_MC_PREPARE_FPU_USAGE();
12303    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
12304        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
12305        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
12306        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12307    } IEM_MC_ELSE() {
        /* ST(0) empty: store the indefinite value only if #IA is masked,
           then report the stack underflow and pop. */
12308        IEM_MC_IF_FCW_IM() {
12309            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
12310            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
12311        } IEM_MC_ENDIF();
12312        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12313    } IEM_MC_ENDIF();
12314    IEM_MC_ADVANCE_RIP_AND_FINISH();
12315
12316    IEM_MC_END();
12317}
12318
12319
12320/** Opcode 0xdf !11/7. */
12321FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
12322{
12323    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    /* FISTP m64int: convert ST(0) to a signed 64-bit integer, store it to
       memory and pop the FPU register stack. */
12324    IEM_MC_BEGIN(3, 2, 0, 0);
12325    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12326    IEM_MC_LOCAL(uint16_t, u16Fsw);
12327    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12328    IEM_MC_ARG(int64_t *, pi64Dst, 1);
12329    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
12330
12331    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12332    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12333    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12334    IEM_MC_MAYBE_RAISE_FPU_XCPT();
12335
    /* Map the destination for writing first so memory faults precede any
       FPU state changes. */
12336    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
12337    IEM_MC_PREPARE_FPU_USAGE();
12338    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
12339        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
12340        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
12341        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12342    } IEM_MC_ELSE() {
        /* ST(0) empty: store the integer-indefinite value only if #IA is
           masked, then report the stack underflow and pop. */
12343        IEM_MC_IF_FCW_IM() {
12344            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
12345            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
12346        } IEM_MC_ENDIF();
12347        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12348    } IEM_MC_ENDIF();
12349    IEM_MC_ADVANCE_RIP_AND_FINISH();
12350
12351    IEM_MC_END();
12352}
12353
12354
12355/**
12356 * @opcode 0xdf
12357 */
12358FNIEMOP_DEF(iemOp_EscF7)
12359{
    /* FPU escape 0xdf: dispatch on the ModR/M byte between the register-form
       (mod == 3) and memory-form instruction variants. */
12360    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (FOP) for FSTENV & friends. */
12361    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
12362    if (IEM_IS_MODRM_REG_MODE(bRm))
12363    {
12364        switch (IEM_GET_MODRM_REG_8(bRm))
12365        {
12366            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
12367            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
12368            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
12369            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            /* FNSTSW AX is only valid for the exact encoding 0xdf 0xe0. */
12370            case 4: if (bRm == 0xe0)
12371                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
12372                    IEMOP_RAISE_INVALID_OPCODE_RET();
12373            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
12374            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
12375            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
12376            IEM_NOT_REACHED_DEFAULT_CASE_RET();
12377        }
12378    }
12379    else
12380    {
12381        switch (IEM_GET_MODRM_REG_8(bRm))
12382        {
12383            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
12384            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
12385            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
12386            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
12387            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
12388            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
12389            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
12390            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
12391            IEM_NOT_REACHED_DEFAULT_CASE_RET();
12392        }
12393    }
12394}
12395
12396
12397/**
12398 * @opcode 0xe0
12399 */
12400FNIEMOP_DEF(iemOp_loopne_Jb)
12401{
12402    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    /* LOOPNE rel8: decrement CX/ECX/RCX (selected by the effective address
       size) and take the short branch while the counter is non-zero AND
       ZF is clear.  The decrement does not modify any flags. */
12403    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12404    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12405
12406    switch (pVCpu->iem.s.enmEffAddrMode)
12407    {
12408        case IEMMODE_16BIT:
12409            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12410            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12411            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
12412            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12413                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12414            } IEM_MC_ELSE() {
12415                IEM_MC_ADVANCE_RIP_AND_FINISH();
12416            } IEM_MC_ENDIF();
12417            IEM_MC_END();
12418            break;
12419
12420        case IEMMODE_32BIT:
12421            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12422            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12423            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
12424            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12425                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12426            } IEM_MC_ELSE() {
12427                IEM_MC_ADVANCE_RIP_AND_FINISH();
12428            } IEM_MC_ENDIF();
12429            IEM_MC_END();
12430            break;
12431
12432        case IEMMODE_64BIT:
12433            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12434            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12435            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
12436            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12437                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12438            } IEM_MC_ELSE() {
12439                IEM_MC_ADVANCE_RIP_AND_FINISH();
12440            } IEM_MC_ENDIF();
12441            IEM_MC_END();
12442            break;
12443
12444        IEM_NOT_REACHED_DEFAULT_CASE_RET();
12445    }
12446}
12447
12448
12449/**
12450 * @opcode 0xe1
12451 */
12452FNIEMOP_DEF(iemOp_loope_Jb)
12453{
12454    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    /* LOOPE rel8: decrement CX/ECX/RCX (selected by the effective address
       size) and take the short branch while the counter is non-zero AND
       ZF is set.  The decrement does not modify any flags. */
12455    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12456    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12457
12458    switch (pVCpu->iem.s.enmEffAddrMode)
12459    {
12460        case IEMMODE_16BIT:
12461            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12462            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12463            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
12464            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
12465                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12466            } IEM_MC_ELSE() {
12467                IEM_MC_ADVANCE_RIP_AND_FINISH();
12468            } IEM_MC_ENDIF();
12469            IEM_MC_END();
12470            break;
12471
12472        case IEMMODE_32BIT:
12473            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12474            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12475            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
12476            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
12477                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12478            } IEM_MC_ELSE() {
12479                IEM_MC_ADVANCE_RIP_AND_FINISH();
12480            } IEM_MC_ENDIF();
12481            IEM_MC_END();
12482            break;
12483
12484        case IEMMODE_64BIT:
12485            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12486            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12487            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
12488            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
12489                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12490            } IEM_MC_ELSE() {
12491                IEM_MC_ADVANCE_RIP_AND_FINISH();
12492            } IEM_MC_ENDIF();
12493            IEM_MC_END();
12494            break;
12495
12496        IEM_NOT_REACHED_DEFAULT_CASE_RET();
12497    }
12498}
12499
12500
12501/**
12502 * @opcode 0xe2
12503 */
12504FNIEMOP_DEF(iemOp_loop_Jb)
12505{
12506    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    /* LOOP rel8: decrement CX/ECX/RCX (selected by the effective address
       size) and take the short branch while the counter is non-zero.  The
       decrement does not modify any flags. */
12507    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12508    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12509
12510    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
12511     * using the 32-bit operand size override.  How can that be restarted?  See
12512     * weird pseudo code in intel manual. */
12513
12514    /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
12515     * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs.  Shortcutting
12516     * the loop causes guest crashes, but when logging it's nice to skip a few million
12517     * lines of useless output. */
12518#if defined(LOG_ENABLED)
    /* Self-branch (LOOP $-2) with verbose logging active: zero the counter
       and fall through, skipping the whole spin. */
12519    if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
12520        switch (pVCpu->iem.s.enmEffAddrMode)
12521        {
12522            case IEMMODE_16BIT:
12523                IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12524                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12525                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
12526                IEM_MC_ADVANCE_RIP_AND_FINISH();
12527                IEM_MC_END();
12528                break;
12529
12530            case IEMMODE_32BIT:
12531                IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12532                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12533                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
12534                IEM_MC_ADVANCE_RIP_AND_FINISH();
12535                IEM_MC_END();
12536                break;
12537
12538            case IEMMODE_64BIT:
12539                IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12540                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12541                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
12542                IEM_MC_ADVANCE_RIP_AND_FINISH();
12543                IEM_MC_END();
12544                break;
12545
12546            IEM_NOT_REACHED_DEFAULT_CASE_RET();
12547        }
12548#endif
12549
    /* Normal operation: decrement the counter and branch while non-zero. */
12550    switch (pVCpu->iem.s.enmEffAddrMode)
12551    {
12552        case IEMMODE_16BIT:
12553            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12554            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12555            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
12556            IEM_MC_IF_CX_IS_NZ() {
12557                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12558            } IEM_MC_ELSE() {
12559                IEM_MC_ADVANCE_RIP_AND_FINISH();
12560            } IEM_MC_ENDIF();
12561            IEM_MC_END();
12562            break;
12563
12564        case IEMMODE_32BIT:
12565            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12566            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12567            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
12568            IEM_MC_IF_ECX_IS_NZ() {
12569                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12570            } IEM_MC_ELSE() {
12571                IEM_MC_ADVANCE_RIP_AND_FINISH();
12572            } IEM_MC_ENDIF();
12573            IEM_MC_END();
12574            break;
12575
12576        case IEMMODE_64BIT:
12577            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12578            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12579            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
12580            IEM_MC_IF_RCX_IS_NZ() {
12581                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12582            } IEM_MC_ELSE() {
12583                IEM_MC_ADVANCE_RIP_AND_FINISH();
12584            } IEM_MC_ENDIF();
12585            IEM_MC_END();
12586            break;
12587
12588        IEM_NOT_REACHED_DEFAULT_CASE_RET();
12589    }
12590}
12591
12592
12593/**
12594 * @opcode 0xe3
12595 */
12596FNIEMOP_DEF(iemOp_jecxz_Jb)
12597{
12598    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    /* JCXZ/JECXZ/JRCXZ rel8: branch when the counter register selected by
       the effective address size is zero.  No flags are examined and the
       counter is not modified (note the inverted IF: non-zero advances). */
12599    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12600    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12601
12602    switch (pVCpu->iem.s.enmEffAddrMode)
12603    {
12604        case IEMMODE_16BIT:
12605            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12606            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12607            IEM_MC_IF_CX_IS_NZ() {
12608                IEM_MC_ADVANCE_RIP_AND_FINISH();
12609            } IEM_MC_ELSE() {
12610                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12611            } IEM_MC_ENDIF();
12612            IEM_MC_END();
12613            break;
12614
12615        case IEMMODE_32BIT:
12616            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12617            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12618            IEM_MC_IF_ECX_IS_NZ() {
12619                IEM_MC_ADVANCE_RIP_AND_FINISH();
12620            } IEM_MC_ELSE() {
12621                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12622            } IEM_MC_ENDIF();
12623            IEM_MC_END();
12624            break;
12625
12626        case IEMMODE_64BIT:
12627            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12628            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12629            IEM_MC_IF_RCX_IS_NZ() {
12630                IEM_MC_ADVANCE_RIP_AND_FINISH();
12631            } IEM_MC_ELSE() {
12632                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12633            } IEM_MC_ENDIF();
12634            IEM_MC_END();
12635            break;
12636
12637        IEM_NOT_REACHED_DEFAULT_CASE_RET();
12638    }
12639}
12640
12641
12642/** Opcode 0xe4 */
12643FNIEMOP_DEF(iemOp_in_AL_Ib)
12644{
12645    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    /* IN AL,imm8: one byte from the immediate port into AL.  Deferred to the
       C implementation; may cause a VM-exit and only AX is modified. */
12646    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12647    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12648    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
12649                                iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
12650}
12651
12652
12653/** Opcode 0xe5 */
12654FNIEMOP_DEF(iemOp_in_eAX_Ib)
12655{
12656    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    /* IN eAX,imm8: a word or dword (per effective operand size) from the
       immediate port into AX/EAX.  Deferred to the C implementation. */
12657    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12658    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12659    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
12660                                iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
12661                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
12662}
12663
12664
12665/** Opcode 0xe6 */
12666FNIEMOP_DEF(iemOp_out_Ib_AL)
12667{
12668    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    /* OUT imm8,AL: write AL to the immediate port.  Deferred to the C
       implementation; may cause a VM-exit, no guest registers modified. */
12669    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12670    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12671    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
12672                                iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
12673}
12674
12675
12676/** Opcode 0xe7 */
12677FNIEMOP_DEF(iemOp_out_Ib_eAX)
12678{
12679    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    /* OUT imm8,eAX: write AX/EAX (per effective operand size) to the
       immediate port.  Deferred to the C implementation. */
12680    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12681    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12682    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
12683                                iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
12684                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
12685}
12686
12687
12688/**
12689 * @opcode 0xe8
12690 */
12691FNIEMOP_DEF(iemOp_call_Jv)
{
    /* CALL rel16/rel32: near relative call.  The immediate width follows the
       effective operand size; in 64-bit mode a rel32 is sign-extended to 64
       bits.  Deferred to the C implementation (pushes the return address). */
12692    IEMOP_MNEMONIC(call_Jv, "call Jv");
12693    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12694    switch (pVCpu->iem.s.enmEffOpSize)
12695    {
12696        case IEMMODE_16BIT:
12697        {
12698            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12699            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
12700                                        iemCImpl_call_rel_16, (int16_t)u16Imm);
12701        }
12702
12703        case IEMMODE_32BIT:
12704        {
12705            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12706            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
12707                                        iemCImpl_call_rel_32, (int32_t)u32Imm);
12708        }
12709
12710        case IEMMODE_64BIT:
12711        {
12712            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
12713            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
12714                                        iemCImpl_call_rel_64, u64Imm);
12715        }
12716
12717        IEM_NOT_REACHED_DEFAULT_CASE_RET();
12718    }
12719}
12721
12722
12723/**
12724 * @opcode 0xe9
12725 */
12726FNIEMOP_DEF(iemOp_jmp_Jv)
12727{
12728    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    /* JMP rel16/rel32: near relative jump.  The 64-bit case shares the
       32-bit decoding since both read a rel32 immediate. */
12729    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12730    switch (pVCpu->iem.s.enmEffOpSize)
12731    {
12732        case IEMMODE_16BIT:
12733            IEM_MC_BEGIN(0, 0, 0, 0);
12734            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
12735            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12736            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
12737            IEM_MC_END();
12738            break;
12739
12740        case IEMMODE_64BIT:
12741        case IEMMODE_32BIT:
12742            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12743            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
12744            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12745            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
12746            IEM_MC_END();
12747            break;
12748
12749        IEM_NOT_REACHED_DEFAULT_CASE_RET();
12750    }
12751}
12752
12753
12754/**
12755 * @opcode 0xea
12756 */
12757FNIEMOP_DEF(iemOp_jmp_Ap)
12758{
12759    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    /* JMP ptr16:16/ptr16:32: direct far jump; invalid in 64-bit mode.  The
       offset is decoded first (16 or 32 bits per operand size), then the
       selector, matching the instruction's byte order. */
12760    IEMOP_HLP_NO_64BIT();
12761
12762    /* Decode the far pointer address and pass it on to the far call C implementation. */
12763    uint32_t off32Seg;
12764    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
12765        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
12766    else
12767        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
12768    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
12769    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12770    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
12771                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
12772                                iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
12773    /** @todo make task-switches, ring-switches, ++ return non-zero status */
12774}
12775
12776
12777/**
12778 * @opcode 0xeb
12779 */
12780FNIEMOP_DEF(iemOp_jmp_Jb)
12781{
12782    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    /* JMP rel8: short relative jump, sign-extended 8-bit displacement. */
12783    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12784    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12785
12786    IEM_MC_BEGIN(0, 0, 0, 0);
12787    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12788    IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12789    IEM_MC_END();
12790}
12791
12792
12793/** Opcode 0xec */
12794FNIEMOP_DEF(iemOp_in_AL_DX)
{
    /* IN AL,DX: one byte from the port in DX into AL.  Deferred to the C
       implementation; may cause a VM-exit and only AX is modified. */
12795    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
12796    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12797    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
12798                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
12799                                iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
12800}
12802
12803
12804/** Opcode 0xed */
12805FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    /* IN eAX,DX: a word or dword (per effective operand size) from the port
       in DX into AX/EAX.  Deferred to the C implementation. */
12806    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
12807    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12808    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
12809                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
12810                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
12811                                pVCpu->iem.s.enmEffAddrMode);
12812}
12814
12815
12816/** Opcode 0xee */
12817FNIEMOP_DEF(iemOp_out_DX_AL)
{
    /* OUT DX,AL: write AL to the port in DX.  Deferred to the C
       implementation; may cause a VM-exit, no guest registers modified. */
12818    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
12819    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12820    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
12821                                iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
12822}
12824
12825
12826/** Opcode 0xef */
12827FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    /* OUT DX,eAX: write AX/EAX (per effective operand size) to the port in
       DX.  Deferred to the C implementation. */
12828    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
12829    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12830    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
12831                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
12832                                pVCpu->iem.s.enmEffAddrMode);
12833}
12835
12836
12837/**
12838 * @opcode 0xf0
12839 */
12840FNIEMOP_DEF(iemOp_lock)
12841{
12842 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
12843 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12844 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
12845
12846 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12847 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12848}
12849
12850
12851/**
12852 * @opcode 0xf1
12853 */
12854FNIEMOP_DEF(iemOp_int1)
{
    /* INT1/ICEBP: raise a #DB-vector software interrupt via the common
       interrupt C implementation. */
12855    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
12856    /** @todo Does not generate \#UD on 286, or so they say...  Was allegedly a
12857     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
12858     * LOADALL memo.  Needs some testing. */
12859    IEMOP_HLP_MIN_386();
12860    /** @todo testcase! */
12861    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
12862                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
12863                                iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
12864}
12866
12867
12868/**
12869 * @opcode 0xf2
12870 */
12871FNIEMOP_DEF(iemOp_repne)
{
    /* REPNE/REPNZ prefix: record the prefix and continue decoding the
       following opcode byte. */
12872    /* This overrides any previous REPE prefix. */
12873    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
12874    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
12875    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
12876
12877    /* For the 4 entry opcode tables, REPNZ overrides any previous
12878       REPZ and operand size prefixes. */
12879    pVCpu->iem.s.idxPrefix = 3;
12880
12881    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12882    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12883}
12885
12886
12887/**
12888 * @opcode 0xf3
12889 */
12890FNIEMOP_DEF(iemOp_repe)
{
    /* REPE/REPZ prefix: record the prefix and continue decoding the
       following opcode byte. */
12891    /* This overrides any previous REPNE prefix. */
12892    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
12893    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
12894    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
12895
12896    /* For the 4 entry opcode tables, REPZ overrides any previous
12897       REPNZ and operand size prefixes. */
12898    pVCpu->iem.s.idxPrefix = 2;
12899
12900    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12901    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12902}
12904
12905
12906/**
12907 * @opcode 0xf4
12908 */
12909FNIEMOP_DEF(iemOp_hlt)
{
    /* HLT: halt the CPU; deferred to the C implementation, ends the
       translation block and may cause a VM-exit. */
12910    IEMOP_MNEMONIC(hlt, "hlt");
12911    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12912    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_hlt);
12913}
12915
12916
12917/**
12918 * @opcode 0xf5
12919 */
12920FNIEMOP_DEF(iemOp_cmc)
{
    /* CMC: complement (toggle) the carry flag; no other flags affected. */
12921    IEMOP_MNEMONIC(cmc, "cmc");
12922    IEM_MC_BEGIN(0, 0, 0, 0);
12923    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12924    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
12925    IEM_MC_ADVANCE_RIP_AND_FINISH();
12926    IEM_MC_END();
12927}
12929
12930
12931/**
12932 * Body for 'inc/dec/not/neg Eb'.
 *
 * Emits the register form, the plain memory form, and the LOCK-prefixed
 * memory form (using @a a_fnLockedU8) of a read-modify-write unary byte
 * operation.  Both memory paths map the operand read-write and commit the
 * updated EFLAGS afterwards.
12933 */
12934#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
12935    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
12936    { \
12937        /* register access */ \
12938        IEM_MC_BEGIN(2, 0, 0, 0); \
12939        IEMOP_HLP_DONE_DECODING(); \
12940        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12941        IEM_MC_ARG(uint32_t *, pEFlags, 1); \
12942        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
12943        IEM_MC_REF_EFLAGS(pEFlags); \
12944        IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
12945        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12946        IEM_MC_END(); \
12947    } \
12948    else \
12949    { \
12950        /* memory access. */ \
12951        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
12952        { \
            /* Plain (unlocked) memory operand. */ \
12953            IEM_MC_BEGIN(2, 2, 0, 0); \
12954            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12955            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
12956            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12957            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12958            \
12959            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
12960            IEMOP_HLP_DONE_DECODING(); \
12961            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12962            IEM_MC_FETCH_EFLAGS(EFlags); \
12963            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
12964            \
12965            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
12966            IEM_MC_COMMIT_EFLAGS(EFlags); \
12967            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12968            IEM_MC_END(); \
12969        } \
12970        else \
12971        { \
            /* LOCK-prefixed memory operand: same flow, locked worker. */ \
12972            IEM_MC_BEGIN(2, 2, 0, 0); \
12973            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12974            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
12975            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12976            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12977            \
12978            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
12979            IEMOP_HLP_DONE_DECODING(); \
12980            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12981            IEM_MC_FETCH_EFLAGS(EFlags); \
12982            IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
12983            \
12984            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
12985            IEM_MC_COMMIT_EFLAGS(EFlags); \
12986            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12987            IEM_MC_END(); \
12988        } \
12989    } \
12990    (void)0
12991
12992
12993/**
12994 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
 *
 * Emits the register forms (16/32/64-bit per effective operand size) and the
 * plain memory forms of a read-modify-write unary operation.
 *
 * Note: this macro deliberately ends inside an open 'else' branch — the
 * LOCK-prefixed memory case — which must be supplied by a following
 * IEMOP_BODY_UNARY_Ev_LOCKED invocation that also closes the braces.
12995 */
12996#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
12997    if (IEM_IS_MODRM_REG_MODE(bRm)) \
12998    { \
12999        /* \
13000         * Register target \
13001         */ \
13002        switch (pVCpu->iem.s.enmEffOpSize) \
13003        { \
13004            case IEMMODE_16BIT: \
13005                IEM_MC_BEGIN(2, 0, 0, 0); \
13006                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13007                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
13008                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13009                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13010                IEM_MC_REF_EFLAGS(pEFlags); \
13011                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
13012                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13013                IEM_MC_END(); \
13014                break; \
13015            \
13016            case IEMMODE_32BIT: \
13017                IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0); \
13018                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13019                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
13020                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13021                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13022                IEM_MC_REF_EFLAGS(pEFlags); \
13023                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */ \
13024                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
13025                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13026                IEM_MC_END(); \
13027                break; \
13028            \
13029            case IEMMODE_64BIT: \
13030                IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0); \
13031                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13032                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
13033                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13034                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13035                IEM_MC_REF_EFLAGS(pEFlags); \
13036                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
13037                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13038                IEM_MC_END(); \
13039                break; \
13040            \
13041            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13042        } \
13043    } \
13044    else \
13045    { \
13046        /* \
13047         * Memory target. \
13048         */ \
13049        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
13050        { \
13051            switch (pVCpu->iem.s.enmEffOpSize) \
13052            { \
13053                case IEMMODE_16BIT: \
13054                    IEM_MC_BEGIN(2, 3, 0, 0); \
13055                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
13056                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13057                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13058                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13059                    \
13060                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13061                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13062                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13063                    IEM_MC_FETCH_EFLAGS(EFlags); \
13064                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
13065                    \
13066                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
13067                    IEM_MC_COMMIT_EFLAGS(EFlags); \
13068                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13069                    IEM_MC_END(); \
13070                    break; \
13071                \
13072                case IEMMODE_32BIT: \
13073                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
13074                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
13075                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13076                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13077                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13078                    \
13079                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13080                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13081                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13082                    IEM_MC_FETCH_EFLAGS(EFlags); \
13083                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
13084                    \
13085                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
13086                    IEM_MC_COMMIT_EFLAGS(EFlags); \
13087                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13088                    IEM_MC_END(); \
13089                    break; \
13090                \
13091                case IEMMODE_64BIT: \
13092                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
13093                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
13094                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13095                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13096                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13097                    \
13098                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13099                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13100                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13101                    IEM_MC_FETCH_EFLAGS(EFlags); \
13102                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
13103                    \
13104                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
13105                    IEM_MC_COMMIT_EFLAGS(EFlags); \
13106                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13107                    IEM_MC_END(); \
13108                    break; \
13109                \
13110                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13111            } \
13112        } \
13113        else \
13114        { \
13115            (void)0
13116
/**
 * Body for the LOCK-prefixed memory variant of 'inc/dec/not/neg Ev'.
 *
 * Completes the open 'else' branch left by IEMOP_BODY_UNARY_Ev and closes
 * its braces; must always directly follow that macro.
 */
13117#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
13118            switch (pVCpu->iem.s.enmEffOpSize) \
13119            { \
13120                case IEMMODE_16BIT: \
13121                    IEM_MC_BEGIN(2, 3, 0, 0); \
13122                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
13123                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13124                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13125                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13126                    \
13127                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13128                    IEMOP_HLP_DONE_DECODING(); \
13129                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13130                    IEM_MC_FETCH_EFLAGS(EFlags); \
13131                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
13132                    \
13133                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
13134                    IEM_MC_COMMIT_EFLAGS(EFlags); \
13135                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13136                    IEM_MC_END(); \
13137                    break; \
13138                \
13139                case IEMMODE_32BIT: \
13140                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
13141                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
13142                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13143                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13144                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13145                    \
13146                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13147                    IEMOP_HLP_DONE_DECODING(); \
13148                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13149                    IEM_MC_FETCH_EFLAGS(EFlags); \
13150                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
13151                    \
13152                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
13153                    IEM_MC_COMMIT_EFLAGS(EFlags); \
13154                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13155                    IEM_MC_END(); \
13156                    break; \
13157                \
13158                case IEMMODE_64BIT: \
13159                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
13160                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
13161                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13162                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13163                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13164                    \
13165                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13166                    IEMOP_HLP_DONE_DECODING(); \
13167                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13168                    IEM_MC_FETCH_EFLAGS(EFlags); \
13169                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
13170                    \
13171                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
13172                    IEM_MC_COMMIT_EFLAGS(EFlags); \
13173                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13174                    IEM_MC_END(); \
13175                    break; \
13176                \
13177                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13178            } \
13179        } \
13180    } \
13181    (void)0
13182
13183
13184/**
13185 * @opmaps grp3_f6
13186 * @opcode /0
13187 * @todo also /1
13188 */
13189FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
13190{
13191    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    /* TEST Eb,Ib: AND the byte operand with the immediate and set flags
       only; the destination is never written (memory form maps read-only). */
13192    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
13193
13194    if (IEM_IS_MODRM_REG_MODE(bRm))
13195    {
13196        /* register access */
13197        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13198        IEM_MC_BEGIN(3, 0, 0, 0);
13199        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13200        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13201        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
13202        IEM_MC_ARG(uint32_t *, pEFlags, 2);
13203        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
13204        IEM_MC_REF_EFLAGS(pEFlags);
13205        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
13206        IEM_MC_ADVANCE_RIP_AND_FINISH();
13207        IEM_MC_END();
13208    }
13209    else
13210    {
13211        /* memory access. */
13212        IEM_MC_BEGIN(3, 3, 0, 0);
13213        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* The immediate byte follows the ModR/M bytes, hence cbImmAndRspOffset=1. */
13214        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13215
13216        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13217        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13218
13219        IEM_MC_LOCAL(uint8_t, bUnmapInfo);
13220        IEM_MC_ARG(uint8_t const *, pu8Dst, 0);
13221        IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13222
13223        IEM_MC_ARG_CONST(uint8_t, u8Src, u8Imm, 1);
13224        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
13225        IEM_MC_FETCH_EFLAGS(EFlags);
13226        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
13227
13228        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo);
13229        IEM_MC_COMMIT_EFLAGS(EFlags);
13230        IEM_MC_ADVANCE_RIP_AND_FINISH();
13231        IEM_MC_END();
13232    }
13233}
13234
13235
/**
 * Opcode 0xf6 /4, /5, /6 and /7 - common worker for mul/imul/div/idiv Eb.
 *
 * The 8-bit forms all operate on AX (result/implicit operand).
 *
 * @param   bRm     The ModR/M byte.
 * @param   pfnU8   The assembly worker; returns zero on success, non-zero to
 *                  raise \#DE (see the rc check below).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEM_MC_BEGIN(3, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero rc from the worker means divide error (#DE). */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2, 0, 0);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero rc from the worker means divide error (#DE). */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
13286
13287
/**
 * Opcode 0xf7 /4, /5, /6 and /7 - common worker for mul/imul/div/idiv Ev.
 *
 * Operates on the implicit DX:AX / EDX:EAX / RDX:RAX register pair selected
 * by the effective operand size.
 *
 * @param   bRm     The ModR/M byte.
 * @param   pImpl   Table with the operand-sized assembly workers; each
 *                  returns zero on success, non-zero to raise \#DE.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                /* Non-zero rc means divide error (#DE). */
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* The worker wrote through 32-bit refs; zero the high dwords here. */
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2, 0, 0);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* The worker wrote through 32-bit refs; zero the high dwords here. */
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13458
13459
13460/**
13461 * @opmaps grp3_f6
13462 * @opcode /2
13463 */
13464FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
13465{
13466 IEMOP_MNEMONIC(not_Eb, "not Eb");
13467 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
13468}
13469
13470
13471/**
13472 * @opmaps grp3_f6
13473 * @opcode /3
13474 */
13475FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
13476{
13477 IEMOP_MNEMONIC(net_Eb, "neg Eb");
13478 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
13479}
13480
13481
13482/**
13483 * @opcode 0xf6
13484 */
13485FNIEMOP_DEF(iemOp_Grp3_Eb)
13486{
13487 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13488 switch (IEM_GET_MODRM_REG_8(bRm))
13489 {
13490 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
13491 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
13492 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
13493 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
13494 case 4:
13495 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
13496 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
13497 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
13498 case 5:
13499 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
13500 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
13501 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
13502 case 6:
13503 IEMOP_MNEMONIC(div_Eb, "div Eb");
13504 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
13505 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
13506 case 7:
13507 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
13508 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
13509 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
13510 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13511 }
13512}
13513
13514
/**
 * Opcode 0xf7 /0 - test Ev,Iv.
 *
 * ANDs the operand with the immediate and updates flags only; the
 * destination is never written back (hence the read-only mappings below).
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                /* 64-bit form takes a sign-extended imm32, not an imm64. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* The '2' accounts for the imm16 following the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint16_t const *, pu16Dst, 0);
                IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* The '4' accounts for the imm32 following the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint32_t const *, pu32Dst, 0);
                IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Still '4' in 64-bit mode: the immediate is an imm32 (sign-extended). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);

                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_ARG(uint64_t const *, pu64Dst, 0);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13650
13651
/** Opcode 0xf7 /2 - not Ev (one's complement). */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    /* First macro handles the register and unlocked memory forms,
       second one the LOCK-prefixed memory form. */
    IEMOP_BODY_UNARY_Ev( iemAImpl_not_u16, iemAImpl_not_u32, iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
13659
13660
/** Opcode 0xf7 /3 - neg Ev (two's complement). */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    /* First macro handles the register and unlocked memory forms,
       second one the LOCK-prefixed memory form. */
    IEMOP_BODY_UNARY_Ev( iemAImpl_neg_u16, iemAImpl_neg_u32, iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
13668
13669
13670/**
13671 * @opcode 0xf7
13672 */
13673FNIEMOP_DEF(iemOp_Grp3_Ev)
13674{
13675 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13676 switch (IEM_GET_MODRM_REG_8(bRm))
13677 {
13678 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
13679 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
13680 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
13681 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
13682 case 4:
13683 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
13684 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
13685 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
13686 case 5:
13687 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
13688 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
13689 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
13690 case 6:
13691 IEMOP_MNEMONIC(div_Ev, "div Ev");
13692 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
13693 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
13694 case 7:
13695 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
13696 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
13697 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
13698 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13699 }
13700}
13701
13702
13703/**
13704 * @opcode 0xf8
13705 */
13706FNIEMOP_DEF(iemOp_clc)
13707{
13708 IEMOP_MNEMONIC(clc, "clc");
13709 IEM_MC_BEGIN(0, 0, 0, 0);
13710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13711 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
13712 IEM_MC_ADVANCE_RIP_AND_FINISH();
13713 IEM_MC_END();
13714}
13715
13716
13717/**
13718 * @opcode 0xf9
13719 */
13720FNIEMOP_DEF(iemOp_stc)
13721{
13722 IEMOP_MNEMONIC(stc, "stc");
13723 IEM_MC_BEGIN(0, 0, 0, 0);
13724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13725 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
13726 IEM_MC_ADVANCE_RIP_AND_FINISH();
13727 IEM_MC_END();
13728}
13729
13730
13731/**
13732 * @opcode 0xfa
13733 */
13734FNIEMOP_DEF(iemOp_cli)
13735{
13736 IEMOP_MNEMONIC(cli, "cli");
13737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13738 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, 0, iemCImpl_cli);
13739}
13740
13741
/**
 * @opcode 0xfb
 *
 * sti - deferred to the C implementation; sets up interrupt-shadow
 * inhibition and requests an IRQ check after the instruction.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(  IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
                                | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, 0, iemCImpl_sti);
}
13749
13750
13751/**
13752 * @opcode 0xfc
13753 */
13754FNIEMOP_DEF(iemOp_cld)
13755{
13756 IEMOP_MNEMONIC(cld, "cld");
13757 IEM_MC_BEGIN(0, 0, 0, 0);
13758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13759 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
13760 IEM_MC_ADVANCE_RIP_AND_FINISH();
13761 IEM_MC_END();
13762}
13763
13764
13765/**
13766 * @opcode 0xfd
13767 */
13768FNIEMOP_DEF(iemOp_std)
13769{
13770 IEMOP_MNEMONIC(std, "std");
13771 IEM_MC_BEGIN(0, 0, 0, 0);
13772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13773 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
13774 IEM_MC_ADVANCE_RIP_AND_FINISH();
13775 IEM_MC_END();
13776}
13777
13778
13779/**
13780 * @opmaps grp4
13781 * @opcode /0
13782 */
13783FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
13784{
13785 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
13786 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
13787}
13788
13789
13790/**
13791 * @opmaps grp4
13792 * @opcode /1
13793 */
13794FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
13795{
13796 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
13797 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
13798}
13799
13800
13801/**
13802 * @opcode 0xfe
13803 */
13804FNIEMOP_DEF(iemOp_Grp4)
13805{
13806 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13807 switch (IEM_GET_MODRM_REG_8(bRm))
13808 {
13809 case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
13810 case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
13811 default:
13812 /** @todo is the eff-addr decoded? */
13813 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
13814 IEMOP_RAISE_INVALID_OPCODE_RET();
13815 }
13816}
13817
/** Opcode 0xff /0 - inc Ev. */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    /* First macro handles the register and unlocked memory forms,
       second one the LOCK-prefixed memory form. */
    IEMOP_BODY_UNARY_Ev( iemAImpl_inc_u16, iemAImpl_inc_u32, iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
13825
13826
/** Opcode 0xff /1 - dec Ev. */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    /* First macro handles the register and unlocked memory forms,
       second one the LOCK-prefixed memory form. */
    IEMOP_BODY_UNARY_Ev( iemAImpl_dec_u16, iemAImpl_dec_u32, iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
13834
13835
13836/**
13837 * Opcode 0xff /2.
13838 * @param bRm The RM byte.
13839 */
13840FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
13841{
13842 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
13843 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13844
13845 if (IEM_IS_MODRM_REG_MODE(bRm))
13846 {
13847 /* The new RIP is taken from a register. */
13848 switch (pVCpu->iem.s.enmEffOpSize)
13849 {
13850 case IEMMODE_16BIT:
13851 IEM_MC_BEGIN(1, 0, 0, 0);
13852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13853 IEM_MC_ARG(uint16_t, u16Target, 0);
13854 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13855 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
13856 IEM_MC_END();
13857 break;
13858
13859 case IEMMODE_32BIT:
13860 IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
13861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13862 IEM_MC_ARG(uint32_t, u32Target, 0);
13863 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13864 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
13865 IEM_MC_END();
13866 break;
13867
13868 case IEMMODE_64BIT:
13869 IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
13870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13871 IEM_MC_ARG(uint64_t, u64Target, 0);
13872 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13873 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
13874 IEM_MC_END();
13875 break;
13876
13877 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13878 }
13879 }
13880 else
13881 {
13882 /* The new RIP is taken from a register. */
13883 switch (pVCpu->iem.s.enmEffOpSize)
13884 {
13885 case IEMMODE_16BIT:
13886 IEM_MC_BEGIN(1, 1, 0, 0);
13887 IEM_MC_ARG(uint16_t, u16Target, 0);
13888 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13889 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13891 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13892 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
13893 IEM_MC_END();
13894 break;
13895
13896 case IEMMODE_32BIT:
13897 IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
13898 IEM_MC_ARG(uint32_t, u32Target, 0);
13899 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13900 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13902 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13903 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
13904 IEM_MC_END();
13905 break;
13906
13907 case IEMMODE_64BIT:
13908 IEM_MC_BEGIN(1, 1, IEM_MC_F_64BIT, 0);
13909 IEM_MC_ARG(uint64_t, u64Target, 0);
13910 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13911 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13913 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13914 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
13915 IEM_MC_END();
13916 break;
13917
13918 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13919 }
13920 }
13921}
13922
/**
 * Body for grp5 far call/jump thru a far pointer in memory (callf/jmpf Ep).
 *
 * Loads offset then selector from the effective address and defers to the
 * given C implementation.  Register operands are invalid (\#UD).
 *
 * @param   a_bRm           The ModR/M byte.
 * @param   a_fnCImpl       C implementation taking (u16Sel, offSeg, enmEffOpSize).
 * @param   a_fCImplExtra   Extra IEM_CIMPL_F_XXX flags to OR in (e.g. stack use).
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl, a_fCImplExtra) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1, 0, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint16_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint32_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint64_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE /* no gates */, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
13991
13992
13993/**
13994 * Opcode 0xff /3.
13995 * @param bRm The RM byte.
13996 */
13997FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
13998{
13999 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
14000 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf, IEM_CIMPL_F_BRANCH_STACK);
14001}
14002
14003
14004/**
14005 * Opcode 0xff /4.
14006 * @param bRm The RM byte.
14007 */
14008FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
14009{
14010 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
14011 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
14012
14013 if (IEM_IS_MODRM_REG_MODE(bRm))
14014 {
14015 /* The new RIP is taken from a register. */
14016 switch (pVCpu->iem.s.enmEffOpSize)
14017 {
14018 case IEMMODE_16BIT:
14019 IEM_MC_BEGIN(0, 1, 0, 0);
14020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14021 IEM_MC_LOCAL(uint16_t, u16Target);
14022 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14023 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
14024 IEM_MC_END();
14025 break;
14026
14027 case IEMMODE_32BIT:
14028 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
14029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14030 IEM_MC_LOCAL(uint32_t, u32Target);
14031 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14032 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
14033 IEM_MC_END();
14034 break;
14035
14036 case IEMMODE_64BIT:
14037 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
14038 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14039 IEM_MC_LOCAL(uint64_t, u64Target);
14040 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14041 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
14042 IEM_MC_END();
14043 break;
14044
14045 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14046 }
14047 }
14048 else
14049 {
14050 /* The new RIP is taken from a memory location. */
14051 switch (pVCpu->iem.s.enmEffOpSize)
14052 {
14053 case IEMMODE_16BIT:
14054 IEM_MC_BEGIN(0, 2, 0, 0);
14055 IEM_MC_LOCAL(uint16_t, u16Target);
14056 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14057 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14059 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14060 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
14061 IEM_MC_END();
14062 break;
14063
14064 case IEMMODE_32BIT:
14065 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
14066 IEM_MC_LOCAL(uint32_t, u32Target);
14067 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14068 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14069 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14070 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14071 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
14072 IEM_MC_END();
14073 break;
14074
14075 case IEMMODE_64BIT:
14076 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
14077 IEM_MC_LOCAL(uint64_t, u64Target);
14078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14081 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14082 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
14083 IEM_MC_END();
14084 break;
14085
14086 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14087 }
14088 }
14089}
14090
14091
14092/**
14093 * Opcode 0xff /5.
14094 * @param bRm The RM byte.
14095 */
14096FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
14097{
14098 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
14099 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp, 0);
14100}
14101
14102
14103/**
14104 * Opcode 0xff /6.
14105 * @param bRm The RM byte.
14106 */
14107FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
14108{
14109 IEMOP_MNEMONIC(push_Ev, "push Ev");
14110
14111 /* Registers are handled by a common worker. */
14112 if (IEM_IS_MODRM_REG_MODE(bRm))
14113 return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
14114
14115 /* Memory we do here. */
14116 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
14117 switch (pVCpu->iem.s.enmEffOpSize)
14118 {
14119 case IEMMODE_16BIT:
14120 IEM_MC_BEGIN(0, 2, 0, 0);
14121 IEM_MC_LOCAL(uint16_t, u16Src);
14122 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14123 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14125 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14126 IEM_MC_PUSH_U16(u16Src);
14127 IEM_MC_ADVANCE_RIP_AND_FINISH();
14128 IEM_MC_END();
14129 break;
14130
14131 case IEMMODE_32BIT:
14132 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
14133 IEM_MC_LOCAL(uint32_t, u32Src);
14134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14137 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14138 IEM_MC_PUSH_U32(u32Src);
14139 IEM_MC_ADVANCE_RIP_AND_FINISH();
14140 IEM_MC_END();
14141 break;
14142
14143 case IEMMODE_64BIT:
14144 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
14145 IEM_MC_LOCAL(uint64_t, u64Src);
14146 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14147 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14149 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14150 IEM_MC_PUSH_U64(u64Src);
14151 IEM_MC_ADVANCE_RIP_AND_FINISH();
14152 IEM_MC_END();
14153 break;
14154
14155 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14156 }
14157}
14158
14159
14160/**
14161 * @opcode 0xff
14162 */
14163FNIEMOP_DEF(iemOp_Grp5)
14164{
14165 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14166 switch (IEM_GET_MODRM_REG_8(bRm))
14167 {
14168 case 0:
14169 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
14170 case 1:
14171 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
14172 case 2:
14173 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
14174 case 3:
14175 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
14176 case 4:
14177 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
14178 case 5:
14179 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
14180 case 6:
14181 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
14182 case 7:
14183 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
14184 IEMOP_RAISE_INVALID_OPCODE_RET();
14185 }
14186 AssertFailedReturn(VERR_IEM_IPE_3);
14187}
14188
14189
14190
/**
 * The one byte opcode decoder function table, indexed directly by the
 * opcode byte (0x00..0xff).  Forward declared at the top of the file so
 * escape/dispatch code can recurse into it.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex3, iemOp_lds_Gv_Mp__vex2, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_in_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
14258
14259
14260/** @} */
14261
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette