VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 100833

Last change on this file since 100833 was 100833, checked in by vboxsync, 20 months ago

VMM/IEM: More conversion from IEM_MC_MEM_MAP to IEM_MC_MEM_MAP_XXX. bugref:10369

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 483.8 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 100833 2023-08-09 14:40:15Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
/**
 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
 * memory/register as the destination.
 *
 * Handles the register destination and the unlocked memory destination.
 * Deliberately open-ended: it stops inside the LOCK-prefix else-branch and
 * MUST be followed by IEMOP_BODY_BINARY_rm_r8_NO_LOCK or
 * IEMOP_BODY_BINARY_rm_r8_LOCKED, which supply the locked path (or \#UD)
 * and close the dangling braces.
 */
#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
118
/**
 * Body for instructions like TEST & CMP, ++ with a byte memory/registers as
 * operands.
 *
 * Same shape as IEMOP_BODY_BINARY_rm_r8_RW, but the memory operand is mapped
 * read-only (these instructions only update EFLAGS, not the destination).
 * Deliberately open-ended: must be followed by
 * IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED to close
 * the dangling braces.
 */
#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,          0); \
            IEM_MC_ARG(uint8_t,         u8Src,           1); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
178
/**
 * Closes an IEMOP_BODY_BINARY_rm_r8_RW/RO body for instructions that do not
 * permit the LOCK prefix: the LOCK-prefixed memory path raises \#UD.
 */
#define IEMOP_BODY_BINARY_rm_r8_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
185
/**
 * Closes an IEMOP_BODY_BINARY_rm_r8_RW body for lockable instructions,
 * supplying the LOCK-prefixed memory path via @a a_fnLockedU8.
 */
#define IEMOP_BODY_BINARY_rm_r8_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bMapInfoDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bMapInfoDst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bMapInfoDst); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
208
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination.
 *
 * Self-contained (no trailing _NO_LOCK/_LOCKED macro needed): since the
 * destination is always a register, a LOCK prefix is rejected on both paths
 * via IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX.
 */
#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        IEM_MC_BEGIN(3, 1); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
257
258
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Handles the register destination (all three effective operand sizes) and
 * the unlocked memory destination.  In the 32-bit register case the high
 * dword of the 64-bit GPR is cleared (IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF).
 * Deliberately open-ended: it stops inside the LOCK-prefix else-branch and
 * MUST be followed by IEMOP_BODY_BINARY_rm_rv_LOCKED, which supplies the
 * locked paths and closes the dangling braces.
 */
#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * Closes an IEMOP_BODY_BINARY_rm_rv_RW body for lockable instructions,
 * supplying the LOCK-prefixed memory paths for all three operand sizes.
 */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
477
/**
 * Body for read-only word/dword/qword instructions like TEST and CMP with
 * memory/register as the destination.
 *
 * Unlike the RW variant this macro is self-contained: the memory operand is
 * mapped read-only (only EFLAGS is updated), and a LOCK prefix on the memory
 * form raises \#UD in the final else-branch.
 */
#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst,         0); \
                    IEM_MC_ARG(uint16_t,         u16Src,          1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst,         0); \
                    IEM_MC_ARG(uint32_t,         u32Src,          1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst,         0); \
                    IEM_MC_ARG(uint64_t,         u64Src,          1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
627
628
/**
 * Body for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * Note: deliberately ends without a semicolon after IEM_MC_END() — the
 * invoking instruction body supplies it.
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    \
    IEM_MC_BEGIN(3, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint8_t *,     pu8Dst,            0); \
    IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1); \
    IEM_MC_ARG(uint32_t *,    pEFlags,           2); \
    \
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
    \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
648
/**
 * Body for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * In 64-bit mode the immediate is a sign-extended dword
 * (IEM_OPCODE_GET_NEXT_S32_SX_U64), matching the Iz encoding.
 *
 * @param   a_fModifiesDstReg   Non-zero when the instruction writes rAX
 *                              (ADD, OR, ...); controls clearing of the high
 *                              dword in the 32-bit case.  Zero for
 *                              compare-only users.
 *
 * NOTE(review): the case bodies have no 'break' after IEM_MC_END(); this
 * presumably relies on IEM_MC_ADVANCE_RIP_AND_FINISH() not returning to the
 * switch — confirm against the IEM_MC_* definitions.
 */
#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *,     pu16Dst,             0); \
            IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1); \
            IEM_MC_ARG(uint32_t *,     pEFlags,             2); \
            \
            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *,     pu32Dst,             0); \
            IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1); \
            IEM_MC_ARG(uint32_t *,     pEFlags,             2); \
            \
            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
            \
            if (a_fModifiesDstReg) \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint64_t *,     pu64Dst,             0); \
            IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1); \
            IEM_MC_ARG(uint32_t *,     pEFlags,             2); \
            \
            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
715
716
717
718/* Instruction specification format - work in progress: */
719
720/**
721 * @opcode 0x00
722 * @opmnemonic add
723 * @op1 rm:Eb
724 * @op2 reg:Gb
725 * @opmaps one
726 * @openc ModR/M
727 * @opflmodify cf,pf,af,zf,sf,of
728 * @ophints harmless ignores_op_sizes
729 * @opstats add_Eb_Gb
730 * @opgroup og_gen_arith_bin
731 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
732 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
733 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
734 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
735 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* ADD Eb,Gb: byte add with memory/register destination; LOCK allowed
       (the _RW body leaves the locked path open, _LOCKED completes it). */
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_add_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_add_u8_locked);
}
742
743
744/**
745 * @opcode 0x01
746 * @opgroup og_gen_arith_bin
747 * @opflmodify cf,pf,af,zf,sf,of
748 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
749 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
750 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
751 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
752 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    /* ADD Ev,Gv: word/dword/qword add with memory/register destination; LOCK allowed. */
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
759
760
761/**
762 * @opcode 0x02
763 * @opgroup og_gen_arith_bin
764 * @opflmodify cf,pf,af,zf,sf,of
765 * @opcopytests iemOp_add_Eb_Gb
766 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* ADD Gb,Eb: register destination, so no locked variant is needed. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
}
772
773
774/**
775 * @opcode 0x03
776 * @opgroup og_gen_arith_bin
777 * @opflmodify cf,pf,af,zf,sf,of
778 * @opcopytests iemOp_add_Ev_Gv
779 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* ADD Gv,Ev: register destination.  Final argument 1 presumably flags
       that the destination register is modified (cf. a_fModifiesDstReg in
       IEMOP_BODY_BINARY_rAX_Iz) — confirm against IEMOP_BODY_BINARY_rv_rm. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
785
786
787/**
788 * @opcode 0x04
789 * @opgroup og_gen_arith_bin
790 * @opflmodify cf,pf,af,zf,sf,of
791 * @opcopytests iemOp_add_Eb_Gb
792 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* ADD AL,Ib: byte immediate to AL. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
}
798
799
800/**
801 * @opcode 0x05
802 * @opgroup og_gen_arith_bin
803 * @opflmodify cf,pf,af,zf,sf,of
804 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
805 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
806 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
807 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
808 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* ADD rAX,Iz: immediate to AX/EAX/RAX; 1 = destination register is modified. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
814
815
816/**
817 * @opcode 0x06
818 * @opgroup og_stack_sreg
819 */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* PUSH ES: not encodable in 64-bit mode (IEMOP_HLP_NO_64BIT raises \#UD there). */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
826
827
828/**
829 * @opcode 0x07
830 * @opgroup og_stack_sreg
831 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* POP ES: not encodable in 64-bit mode; the work is deferred to
       iemCImpl_pop_Sreg, flagged IEM_CIMPL_F_MODE (segment load may change
       mode-related state). */
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
839
840
841/**
842 * @opcode 0x08
843 * @opgroup og_gen_arith_bin
844 * @opflmodify cf,pf,af,zf,sf,of
845 * @opflundef af
846 * @opflclear of,cf
847 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
848 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
849 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
850 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
851 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    /* OR Eb,Gb: byte OR with memory/register destination; LOCK allowed.
       AF is undefined after OR (see @opflundef above). */
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_or_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_or_u8_locked);
}
859
860
/**
862 * @opcode 0x09
863 * @opgroup og_gen_arith_bin
864 * @opflmodify cf,pf,af,zf,sf,of
865 * @opflundef af
866 * @opflclear of,cf
867 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
868 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
869 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
870 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
871 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
872 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
873 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    /* OR Ev,Gv: word/dword/qword OR with memory/register destination; LOCK
       allowed.  AF is undefined after OR. */
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
881
882
883/**
884 * @opcode 0x0a
885 * @opgroup og_gen_arith_bin
886 * @opflmodify cf,pf,af,zf,sf,of
887 * @opflundef af
888 * @opflclear of,cf
889 * @opcopytests iemOp_or_Eb_Gb
890 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    /* OR Gb,Eb: register destination.  AF is undefined after OR. */
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
}
897
898
899/**
900 * @opcode 0x0b
901 * @opgroup og_gen_arith_bin
902 * @opflmodify cf,pf,af,zf,sf,of
903 * @opflundef af
904 * @opflclear of,cf
905 * @opcopytests iemOp_or_Ev_Gv
906 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* OR Gv,Ev: register destination.  AF is undefined after OR.  Final
       argument 1 presumably flags the destination register as modified —
       confirm against IEMOP_BODY_BINARY_rv_rm. */
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
913
914
915/**
916 * @opcode 0x0c
917 * @opgroup og_gen_arith_bin
918 * @opflmodify cf,pf,af,zf,sf,of
919 * @opflundef af
920 * @opflclear of,cf
921 * @opcopytests iemOp_or_Eb_Gb
922 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* OR AL,Ib: byte immediate OR into AL.  AF is undefined after OR. */
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
}
929
930
931/**
932 * @opcode 0x0d
933 * @opgroup og_gen_arith_bin
934 * @opflmodify cf,pf,af,zf,sf,of
935 * @opflundef af
936 * @opflclear of,cf
937 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
938 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
939 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
940 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
941 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
942 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
943 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
944 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX,Iz: immediate OR into AX/EAX/RAX; 1 = destination register is
       modified.  AF is undefined after OR. */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
951
952
953/**
954 * @opcode 0x0e
955 * @opgroup og_stack_sreg
956 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS: only reachable on pre-286 CPUs, where 0x0e is not the
       two-byte escape; not encodable in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
963
964
965/**
966 * @opcode 0x0f
967 * @opmnemonic EscTwo0f
968 * @openc two0f
969 * @opdisenum OP_2B_ESC
970 * @ophints harmless
971 * @opgroup og_escapes
972 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    /* 0x0f is the two-byte opcode escape on 286 and later: fetch the second
       opcode byte and dispatch via the two-byte table (4 entries per opcode,
       selected by the active mandatory prefix in idxPrefix). */
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
1008
1009/**
1010 * @opcode 0x10
1011 * @opgroup og_gen_arith_bin
1012 * @opfltest cf
1013 * @opflmodify cf,pf,af,zf,sf,of
1014 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1015 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1016 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1017 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1018 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1019 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC Eb,Gb - byte destination (reg or mem); LOCK prefix selects the atomic worker. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_adc_u8_locked);
}
1026
1027
1028/**
1029 * @opcode 0x11
1030 * @opgroup og_gen_arith_bin
1031 * @opfltest cf
1032 * @opflmodify cf,pf,af,zf,sf,of
1033 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1034 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1035 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1036 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1037 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1038 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC Ev,Gv - word/dword/qword destination; LOCK prefix selects the atomic workers. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1045
1046
1047/**
1048 * @opcode 0x12
1049 * @opgroup og_gen_arith_bin
1050 * @opfltest cf
1051 * @opflmodify cf,pf,af,zf,sf,of
1052 * @opcopytests iemOp_adc_Eb_Gb
1053 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC Gb,Eb - byte register destination form (no LOCK; destination is a register). */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
1059
1060
1061/**
1062 * @opcode 0x13
1063 * @opgroup og_gen_arith_bin
1064 * @opfltest cf
1065 * @opflmodify cf,pf,af,zf,sf,of
1066 * @opcopytests iemOp_adc_Ev_Gv
1067 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC Gv,Ev - word/dword/qword register destination form. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1073
1074
1075/**
1076 * @opcode 0x14
1077 * @opgroup og_gen_arith_bin
1078 * @opfltest cf
1079 * @opflmodify cf,pf,af,zf,sf,of
1080 * @opcopytests iemOp_adc_Eb_Gb
1081 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL,Ib - byte immediate into AL; operand-size prefixes are ignored. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
1087
1088
1089/**
1090 * @opcode 0x15
1091 * @opgroup og_gen_arith_bin
1092 * @opfltest cf
1093 * @opflmodify cf,pf,af,zf,sf,of
1094 * @opcopytests iemOp_adc_Ev_Gv
1095 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX,Iz - worker picked by effective operand size. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1101
1102
1103/**
1104 * @opcode 0x16
1105 */
1106FNIEMOP_DEF(iemOp_push_SS)
1107{
1108 IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
1109 IEMOP_HLP_NO_64BIT();
1110 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
1111}
1112
1113
1114/**
1115 * @opcode 0x17
1116 * @opgroup og_gen_arith_bin
1117 * @opfltest cf
1118 * @opflmodify cf,pf,af,zf,sf,of
1119 */
1120FNIEMOP_DEF(iemOp_pop_SS)
1121{
1122 IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
1123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1124 IEMOP_HLP_NO_64BIT();
1125 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
1126}
1127
1128
1129/**
1130 * @opcode 0x18
1131 * @opgroup og_gen_arith_bin
1132 * @opfltest cf
1133 * @opflmodify cf,pf,af,zf,sf,of
1134 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB Eb,Gb - byte destination; LOCK prefix selects the atomic worker. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sbb_u8_locked);
}
1141
1142
1143/**
1144 * @opcode 0x19
1145 * @opgroup og_gen_arith_bin
1146 * @opfltest cf
1147 * @opflmodify cf,pf,af,zf,sf,of
1148 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB Ev,Gv - word/dword/qword destination; LOCK prefix selects the atomic workers. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1155
1156
1157/**
1158 * @opcode 0x1a
1159 * @opgroup og_gen_arith_bin
1160 * @opfltest cf
1161 * @opflmodify cf,pf,af,zf,sf,of
1162 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB Gb,Eb - byte register destination form. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
1168
1169
1170/**
1171 * @opcode 0x1b
1172 * @opgroup og_gen_arith_bin
1173 * @opfltest cf
1174 * @opflmodify cf,pf,af,zf,sf,of
1175 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB Gv,Ev - word/dword/qword register destination form. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1181
1182
1183/**
1184 * @opcode 0x1c
1185 * @opgroup og_gen_arith_bin
1186 * @opfltest cf
1187 * @opflmodify cf,pf,af,zf,sf,of
1188 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL,Ib - byte immediate; operand-size prefixes are ignored. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
1194
1195
1196/**
1197 * @opcode 0x1d
1198 * @opgroup og_gen_arith_bin
1199 * @opfltest cf
1200 * @opflmodify cf,pf,af,zf,sf,of
1201 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX,Iz - worker picked by effective operand size. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1207
1208
1209/**
1210 * @opcode 0x1e
1211 * @opgroup og_stack_sreg
1212 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1219
1220
1221/**
1222 * @opcode 0x1f
1223 * @opgroup og_stack_sreg
1224 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS - invalid in 64-bit mode; deferred to the C implementation since it
       can change addressing semantics (IEM_CIMPL_F_MODE). */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1232
1233
1234/**
1235 * @opcode 0x20
1236 * @opgroup og_gen_arith_bin
1237 * @opflmodify cf,pf,af,zf,sf,of
1238 * @opflundef af
1239 * @opflclear of,cf
1240 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND Eb,Gb - byte destination; LOCK prefix selects the atomic worker; AF undefined. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_and_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_and_u8_locked);
}
1248
1249
1250/**
1251 * @opcode 0x21
1252 * @opgroup og_gen_arith_bin
1253 * @opflmodify cf,pf,af,zf,sf,of
1254 * @opflundef af
1255 * @opflclear of,cf
1256 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND Ev,Gv - word/dword/qword destination; LOCK prefix selects the atomic workers. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1264
1265
1266/**
1267 * @opcode 0x22
1268 * @opgroup og_gen_arith_bin
1269 * @opflmodify cf,pf,af,zf,sf,of
1270 * @opflundef af
1271 * @opflclear of,cf
1272 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND Gb,Eb - byte register destination form. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1279
1280
1281/**
1282 * @opcode 0x23
1283 * @opgroup og_gen_arith_bin
1284 * @opflmodify cf,pf,af,zf,sf,of
1285 * @opflundef af
1286 * @opflclear of,cf
1287 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND Gv,Ev - word/dword/qword register destination form. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1294
1295
1296/**
1297 * @opcode 0x24
1298 * @opgroup og_gen_arith_bin
1299 * @opflmodify cf,pf,af,zf,sf,of
1300 * @opflundef af
1301 * @opflclear of,cf
1302 */
1303FNIEMOP_DEF(iemOp_and_Al_Ib)
1304{
1305 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1306 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1307 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1308}
1309
1310
1311/**
1312 * @opcode 0x25
1313 * @opgroup og_gen_arith_bin
1314 * @opflmodify cf,pf,af,zf,sf,of
1315 * @opflundef af
1316 * @opflclear of,cf
1317 */
1318FNIEMOP_DEF(iemOp_and_eAX_Iz)
1319{
1320 IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
1321 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1322 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
1323}
1324
1325
1326/**
1327 * @opcode 0x26
1328 * @opmnemonic SEG
1329 * @op1 ES
1330 * @opgroup og_prefix
1331 * @openc prefix
1332 * @opdisenum OP_SEG
1333 * @ophints harmless
1334 */
1335FNIEMOP_DEF(iemOp_seg_ES)
1336{
1337 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
1338 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
1339 pVCpu->iem.s.iEffSeg = X86_SREG_ES;
1340
1341 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1342 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1343}
1344
1345
1346/**
1347 * @opcode 0x27
1348 * @opfltest af,cf
1349 * @opflmodify cf,pf,af,zf,sf,of
1350 * @opflundef of
1351 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA - decimal adjust AL after addition; invalid in 64-bit mode; OF undefined. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_daa);
}
1360
1361
1362/**
1363 * @opcode 0x28
1364 * @opgroup og_gen_arith_bin
1365 * @opflmodify cf,pf,af,zf,sf,of
1366 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB Eb,Gb - byte destination; LOCK prefix selects the atomic worker. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sub_u8_locked);
}
1373
1374
1375/**
1376 * @opcode 0x29
1377 * @opgroup og_gen_arith_bin
1378 * @opflmodify cf,pf,af,zf,sf,of
1379 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB Ev,Gv - word/dword/qword destination; LOCK prefix selects the atomic workers. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1386
1387
1388/**
1389 * @opcode 0x2a
1390 * @opgroup og_gen_arith_bin
1391 * @opflmodify cf,pf,af,zf,sf,of
1392 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB Gb,Eb - byte register destination form. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1398
1399
1400/**
1401 * @opcode 0x2b
1402 * @opgroup og_gen_arith_bin
1403 * @opflmodify cf,pf,af,zf,sf,of
1404 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB Gv,Ev - word/dword/qword register destination form. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1410
1411
1412/**
1413 * @opcode 0x2c
1414 * @opgroup og_gen_arith_bin
1415 * @opflmodify cf,pf,af,zf,sf,of
1416 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL,Ib - byte immediate; operand-size prefixes are ignored. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1422
1423
1424/**
1425 * @opcode 0x2d
1426 * @opgroup og_gen_arith_bin
1427 * @opflmodify cf,pf,af,zf,sf,of
1428 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX,Iz - worker picked by effective operand size. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1434
1435
1436/**
1437 * @opcode 0x2e
1438 * @opmnemonic SEG
1439 * @op1 CS
1440 * @opgroup og_prefix
1441 * @openc prefix
1442 * @opdisenum OP_SEG
1443 * @ophints harmless
1444 */
1445FNIEMOP_DEF(iemOp_seg_CS)
1446{
1447 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
1448 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
1449 pVCpu->iem.s.iEffSeg = X86_SREG_CS;
1450
1451 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1452 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1453}
1454
1455
1456/**
1457 * @opcode 0x2f
1458 * @opfltest af,cf
1459 * @opflmodify cf,pf,af,zf,sf,of
1460 * @opflundef of
1461 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS - decimal adjust AL after subtraction; invalid in 64-bit mode; OF undefined. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_das);
}
1470
1471
1472/**
1473 * @opcode 0x30
1474 * @opgroup og_gen_arith_bin
1475 * @opflmodify cf,pf,af,zf,sf,of
1476 * @opflundef af
1477 * @opflclear of,cf
1478 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR Eb,Gb - byte destination; LOCK prefix selects the atomic worker; AF undefined. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_xor_u8_locked);
}
1486
1487
1488/**
1489 * @opcode 0x31
1490 * @opgroup og_gen_arith_bin
1491 * @opflmodify cf,pf,af,zf,sf,of
1492 * @opflundef af
1493 * @opflclear of,cf
1494 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR Ev,Gv - word/dword/qword destination; LOCK prefix selects the atomic workers. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1502
1503
1504/**
1505 * @opcode 0x32
1506 * @opgroup og_gen_arith_bin
1507 * @opflmodify cf,pf,af,zf,sf,of
1508 * @opflundef af
1509 * @opflclear of,cf
1510 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR Gb,Eb - byte register destination form. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1517
1518
1519/**
1520 * @opcode 0x33
1521 * @opgroup og_gen_arith_bin
1522 * @opflmodify cf,pf,af,zf,sf,of
1523 * @opflundef af
1524 * @opflclear of,cf
1525 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR Gv,Ev - word/dword/qword register destination form. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
}
1532
1533
1534/**
1535 * @opcode 0x34
1536 * @opgroup og_gen_arith_bin
1537 * @opflmodify cf,pf,af,zf,sf,of
1538 * @opflundef af
1539 * @opflclear of,cf
1540 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL,Ib - byte immediate; operand-size prefixes are ignored. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1547
1548
1549/**
1550 * @opcode 0x35
1551 * @opgroup og_gen_arith_bin
1552 * @opflmodify cf,pf,af,zf,sf,of
1553 * @opflundef af
1554 * @opflclear of,cf
1555 */
1556FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1557{
1558 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1559 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1560 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1561}
1562
1563
1564/**
1565 * @opcode 0x36
1566 * @opmnemonic SEG
1567 * @op1 SS
1568 * @opgroup og_prefix
1569 * @openc prefix
1570 * @opdisenum OP_SEG
1571 * @ophints harmless
1572 */
1573FNIEMOP_DEF(iemOp_seg_SS)
1574{
1575 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
1576 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
1577 pVCpu->iem.s.iEffSeg = X86_SREG_SS;
1578
1579 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1580 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1581}
1582
1583
1584/**
1585 * @opcode 0x37
1586 * @opfltest af,cf
1587 * @opflmodify cf,pf,af,zf,sf,of
1588 * @opflundef pf,zf,sf,of
1589 * @opgroup og_gen_arith_dec
1590 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1591 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1592 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1593 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1594 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1595 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1596 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1597 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1598 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1599 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1600 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1601 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1602 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1603 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1604 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1605 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1606 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1607 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1608 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1609 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1610 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1611 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1612 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1613 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1614 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1615 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1616 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1617 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1618 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1619 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1620 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1621 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA - ASCII adjust AL after addition; invalid in 64-bit mode; OF undefined. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aaa);
}
1631
1632
1633/**
1634 * @opcode 0x38
1635 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP Eb,Gb - read-only destination (RO body), so the LOCK prefix is rejected. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
1642
1643
1644/**
1645 * @opcode 0x39
1646 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP Ev,Gv - read-only destination. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
1652
1653
1654/**
1655 * @opcode 0x3a
1656 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP Gb,Eb - byte register first operand form. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1662
1663
1664/**
1665 * @opcode 0x3b
1666 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP Gv,Ev - note the final 0: unlike or/adc/etc. the result is not written back. */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1672
1673
1674/**
1675 * @opcode 0x3c
1676 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL,Ib - byte immediate compare against AL. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1682
1683
1684/**
1685 * @opcode 0x3d
1686 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX,Iz - final 0: result is not written back. */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1692
1693
1694/**
1695 * @opcode 0x3e
1696 */
1697FNIEMOP_DEF(iemOp_seg_DS)
1698{
1699 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
1700 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
1701 pVCpu->iem.s.iEffSeg = X86_SREG_DS;
1702
1703 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1704 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1705}
1706
1707
1708/**
1709 * @opcode 0x3f
1710 * @opfltest af,cf
1711 * @opflmodify cf,pf,af,zf,sf,of
1712 * @opflundef pf,zf,sf,of
1713 * @opgroup og_gen_arith_dec
1714 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1715 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1716 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1717 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1718 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1719 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1720 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1721 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1722 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1723 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1724 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1725 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1726 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1727 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1728 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1729 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1730 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1731 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1732 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1733 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1734 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1735 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1736 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1737 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1738 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1739 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1740 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1741 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1742 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1743 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1744 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1745 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1746 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1747 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1748 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1749 */
1750FNIEMOP_DEF(iemOp_aas)
1751{
1752 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1753 IEMOP_HLP_NO_64BIT();
1754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1755 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1756
1757 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aas);
1758}
1759
1760
1761/**
1762 * Common 'inc/dec register' helper.
1763 *
1764 * Not for 64-bit code, only for what became the rex prefixes.
1765 */
1766#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
1767 switch (pVCpu->iem.s.enmEffOpSize) \
1768 { \
1769 case IEMMODE_16BIT: \
1770 IEM_MC_BEGIN(2, 0); \
1771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1772 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
1773 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1774 IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
1775 IEM_MC_REF_EFLAGS(pEFlags); \
1776 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
1777 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1778 IEM_MC_END(); \
1779 break; \
1780 \
1781 case IEMMODE_32BIT: \
1782 IEM_MC_BEGIN(2, 0); \
1783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1784 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
1785 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1786 IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
1787 IEM_MC_REF_EFLAGS(pEFlags); \
1788 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
1789 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
1790 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1791 IEM_MC_END(); \
1792 break; \
1793 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1794 } \
1795 (void)0
1796
1797/**
1798 * @opcode 0x40
1799 */
1800FNIEMOP_DEF(iemOp_inc_eAX)
1801{
1802 /*
1803 * This is a REX prefix in 64-bit mode.
1804 */
1805 if (IEM_IS_64BIT_CODE(pVCpu))
1806 {
1807 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
1808 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;
1809
1810 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1811 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1812 }
1813
1814 IEMOP_MNEMONIC(inc_eAX, "inc eAX");
1815 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
1816}
1817
1818
1819/**
1820 * @opcode 0x41
1821 */
1822FNIEMOP_DEF(iemOp_inc_eCX)
1823{
1824 /*
1825 * This is a REX prefix in 64-bit mode.
1826 */
1827 if (IEM_IS_64BIT_CODE(pVCpu))
1828 {
1829 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
1830 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
1831 pVCpu->iem.s.uRexB = 1 << 3;
1832
1833 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1834 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1835 }
1836
1837 IEMOP_MNEMONIC(inc_eCX, "inc eCX");
1838 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
1839}
1840
1841
1842/**
1843 * @opcode 0x42
1844 */
1845FNIEMOP_DEF(iemOp_inc_eDX)
1846{
1847 /*
1848 * This is a REX prefix in 64-bit mode.
1849 */
1850 if (IEM_IS_64BIT_CODE(pVCpu))
1851 {
1852 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
1853 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
1854 pVCpu->iem.s.uRexIndex = 1 << 3;
1855
1856 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1857 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1858 }
1859
1860 IEMOP_MNEMONIC(inc_eDX, "inc eDX");
1861 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
1862}
1863
1864
1865
1866/**
1867 * @opcode 0x43
1868 */
1869FNIEMOP_DEF(iemOp_inc_eBX)
1870{
1871 /*
1872 * This is a REX prefix in 64-bit mode.
1873 */
1874 if (IEM_IS_64BIT_CODE(pVCpu))
1875 {
1876 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
1877 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
1878 pVCpu->iem.s.uRexB = 1 << 3;
1879 pVCpu->iem.s.uRexIndex = 1 << 3;
1880
1881 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1882 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1883 }
1884
1885 IEMOP_MNEMONIC(inc_eBX, "inc eBX");
1886 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
1887}
1888
1889
1890/**
1891 * @opcode 0x44
1892 */
1893FNIEMOP_DEF(iemOp_inc_eSP)
1894{
1895 /*
1896 * This is a REX prefix in 64-bit mode.
1897 */
1898 if (IEM_IS_64BIT_CODE(pVCpu))
1899 {
1900 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
1901 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
1902 pVCpu->iem.s.uRexReg = 1 << 3;
1903
1904 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1905 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1906 }
1907
1908 IEMOP_MNEMONIC(inc_eSP, "inc eSP");
1909 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
1910}
1911
1912
1913/**
1914 * @opcode 0x45
1915 */
1916FNIEMOP_DEF(iemOp_inc_eBP)
1917{
1918 /*
1919 * This is a REX prefix in 64-bit mode.
1920 */
1921 if (IEM_IS_64BIT_CODE(pVCpu))
1922 {
1923 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
1924 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
1925 pVCpu->iem.s.uRexReg = 1 << 3;
1926 pVCpu->iem.s.uRexB = 1 << 3;
1927
1928 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1929 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1930 }
1931
1932 IEMOP_MNEMONIC(inc_eBP, "inc eBP");
1933 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
1934}
1935
1936
1937/**
1938 * @opcode 0x46
1939 */
1940FNIEMOP_DEF(iemOp_inc_eSI)
1941{
1942 /*
1943 * This is a REX prefix in 64-bit mode.
1944 */
1945 if (IEM_IS_64BIT_CODE(pVCpu))
1946 {
1947 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
1948 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
1949 pVCpu->iem.s.uRexReg = 1 << 3;
1950 pVCpu->iem.s.uRexIndex = 1 << 3;
1951
1952 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1953 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1954 }
1955
1956 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
1957 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
1958}
1959
1960
1961/**
1962 * @opcode 0x47
1963 */
1964FNIEMOP_DEF(iemOp_inc_eDI)
1965{
1966 /*
1967 * This is a REX prefix in 64-bit mode.
1968 */
1969 if (IEM_IS_64BIT_CODE(pVCpu))
1970 {
1971 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
1972 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
1973 pVCpu->iem.s.uRexReg = 1 << 3;
1974 pVCpu->iem.s.uRexB = 1 << 3;
1975 pVCpu->iem.s.uRexIndex = 1 << 3;
1976
1977 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1978 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1979 }
1980
1981 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
1982 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
1983}
1984
1985
/**
 * @opcode 0x48
 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.W: 64-bit operand size - the effective operand size must be recalculated. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu);

        /* Prefix consumed; fetch and dispatch the actual opcode byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is 'dec eAX' (16/32-bit operand size). */
    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
}
2007
2008
/**
 * @opcode 0x49
 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.BW: 64-bit operand size and r/m (base) register extension by bit 3. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* Prefix consumed; fetch and dispatch the actual opcode byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is 'dec eCX' (16/32-bit operand size). */
    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
}
2031
2032
/**
 * @opcode 0x4a
 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.XW: 64-bit operand size and SIB index register extension by bit 3. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* Prefix consumed; fetch and dispatch the actual opcode byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is 'dec eDX' (16/32-bit operand size). */
    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
}
2055
2056
/**
 * @opcode 0x4b
 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.BXW: 64-bit operand size plus r/m (base) and SIB index extension by bit 3. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* Prefix consumed; fetch and dispatch the actual opcode byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is 'dec eBX' (16/32-bit operand size). */
    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
}
2080
2081
/**
 * @opcode 0x4c
 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RW: 64-bit operand size and ModRM reg field extension by bit 3. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* Prefix consumed; fetch and dispatch the actual opcode byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is 'dec eSP' (16/32-bit operand size). */
    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
}
2104
2105
/**
 * @opcode 0x4d
 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RBW: 64-bit operand size plus ModRM reg and r/m (base) extension by bit 3. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* Prefix consumed; fetch and dispatch the actual opcode byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is 'dec eBP' (16/32-bit operand size). */
    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
}
2129
2130
/**
 * @opcode 0x4e
 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RXW: 64-bit operand size plus ModRM reg and SIB index extension by bit 3. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* Prefix consumed; fetch and dispatch the actual opcode byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is 'dec eSI' (16/32-bit operand size). */
    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
}
2154
2155
/**
 * @opcode 0x4f
 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RBXW: all four REX bits set - 64-bit operand size plus reg,
           r/m (base) and SIB index register extension by bit 3. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* Prefix consumed; fetch and dispatch the actual opcode byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is 'dec eDI' (16/32-bit operand size). */
    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
}
2180
2181
/**
 * Common 'push register' helper.
 *
 * @param   iReg    The general register index (X86_GREG_XXX); in 64-bit mode
 *                  it is extended with REX.B below.
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Apply the REX.B register extension and the 64-bit stack rules:
           the default operand size is 64-bit and the 0x66 prefix selects
           16-bit; there is no 32-bit push in 64-bit mode. */
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* Fetch the register value and push it using the effective operand size. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2229
2230
/**
 * @opcode 0x50
 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    /* PUSH rAX - defers to the common push-register helper. */
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
2239
2240
/**
 * @opcode 0x51
 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    /* PUSH rCX - defers to the common push-register helper. */
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
2249
2250
/**
 * @opcode 0x52
 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    /* PUSH rDX - defers to the common push-register helper. */
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
2259
2260
/**
 * @opcode 0x53
 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    /* PUSH rBX - defers to the common push-register helper. */
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
2269
2270
/**
 * @opcode 0x54
 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        /* 8086/8088 quirk: SP is decremented before being stored, so the
           value pushed is SP-2 rather than the original SP. */
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* 80286 and later push the original SP value. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
2290
2291
/**
 * @opcode 0x55
 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    /* PUSH rBP - defers to the common push-register helper. */
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
2300
2301
/**
 * @opcode 0x56
 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    /* PUSH rSI - defers to the common push-register helper. */
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
2310
2311
/**
 * @opcode 0x57
 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    /* PUSH rDI - defers to the common push-register helper. */
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
2320
2321
/**
 * Common 'pop register' helper.
 *
 * @param   iReg    The general register index (X86_GREG_XXX); in 64-bit mode
 *                  it is extended with REX.B below.
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Apply the REX.B register extension and the 64-bit stack rules:
           the default operand size is 64-bit and the 0x66 prefix selects
           16-bit; there is no 32-bit pop in 64-bit mode. */
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* Pop straight into the target register via a reference. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            /* Writing the 32-bit register clears the high half of the 64-bit one. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2370
2371
/**
 * @opcode 0x58
 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    /* POP rAX - defers to the common pop-register helper. */
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
2380
2381
/**
 * @opcode 0x59
 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    /* POP rCX - defers to the common pop-register helper. */
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
2390
2391
/**
 * @opcode 0x5a
 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    /* POP rDX - defers to the common pop-register helper. */
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
2400
2401
/**
 * @opcode 0x5b
 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    /* POP rBX - defers to the common pop-register helper. */
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
2410
2411
/**
 * @opcode 0x5c
 *
 * POP rSP needs a dedicated implementation: the value is popped into a
 * local first and then stored to the stack pointer, instead of popping
 * through a register reference like the common helper does.
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* With REX.B this encodes POP r12; the common helper applies the
           uRexB extension to X86_GREG_xSP. */
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        /* 64-bit stack rules: default 64-bit operand size, 0x66 gives 16-bit. */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2465
2466
/**
 * @opcode 0x5d
 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    /* POP rBP - defers to the common pop-register helper. */
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
2475
2476
/**
 * @opcode 0x5e
 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    /* POP rSI - defers to the common pop-register helper. */
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
2485
2486
/**
 * @opcode 0x5f
 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    /* POP rDI - defers to the common pop-register helper. */
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
2495
2496
/**
 * @opcode 0x60
 *
 * PUSHA/PUSHAD - pushes all general registers. Requires a 186+ and is
 * invalid in 64-bit mode; deferred to a C implementation per operand size.
 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_32);
}
2510
2511
/**
 * @opcode 0x61
 *
 * POPA/POPAD outside 64-bit mode; in 64-bit mode 0x61 is the MVEX prefix
 * (Knights Corner), which is not supported and raises \#UD.
 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
2531
2532
/**
 * @opcode 0x62
 * @opmnemonic bound
 * @op1 Gv_RO
 * @op2 Ma
 * @opmincpu 80186
 * @ophints harmless x86_invalid_64
 * @optest op1=0 op2=0 ->
 * @optest op1=1 op2=0 -> value.xcpt=5
 * @optest o16 / op1=0xffff op2=0x0000fffe ->
 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restrict wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /* Memory operand: this is a real BOUND instruction. The index
               register is checked against the lower/upper bound pair read
               from memory; the C implementation raises #BR on failure. */
            /** @todo testcase: check that there are two memory accesses involved. Check
             * whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at [mem], upper bound at [mem+2]. */
                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at [mem], upper bound at [mem+4]. */
                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        /* MOD=3 in legacy/compat mode: only valid as an EVEX prefix on AVX-512 CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /* 64-bit mode: 0x62 is always the EVEX prefix (BOUND is invalid). */
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         * does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* Consume the remaining two EVEX payload bytes; decoding not implemented yet. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2672
2673
/** Opcode 0x63 - non-64-bit modes.
 *
 * ARPL Ew,Gw - adjusts the RPL field of the destination selector; requires
 * a 286+ and protected mode (invalid in real and V86 mode).
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: reference the r/m register directly. */
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map the word read/write, run the assembly
           helper on the mapping, then commit both the memory and EFLAGS. */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2722
2723
/**
 * @opcode 0x63
 *
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register.
             */
            /* Sign-extend the 32-bit source register into the 64-bit destination. */
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory.
             */
            /* Sign-extend the 32-bit memory operand into the 64-bit destination. */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        /* The non-REX.W (plain 32-bit move) variant is not implemented yet. */
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
2771
2772
/**
 * @opcode 0x64
 * @opmnemonic segfs
 * @opmincpu 80386
 * @opgroup og_prefixes
 *
 * FS segment-override prefix: record it, set the effective segment, and
 * continue decoding with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2790
2791
/**
 * @opcode 0x65
 * @opmnemonic seggs
 * @opmincpu 80386
 * @opgroup og_prefixes
 *
 * GS segment-override prefix: record it, set the effective segment, and
 * continue decoding with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2809
2810
/**
 * @opcode 0x66
 * @opmnemonic opsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 *
 * Operand-size override prefix: record it, recalculate the effective
 * operand size, and continue decoding with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2835
2836
/**
 * @opcode 0x67
 * @opmnemonic addrsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 *
 * Address-size override prefix: record it, toggle the effective address
 * size relative to the default, and continue with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2862
2863
/**
 * @opcode 0x68
 *
 * PUSH Iz - push an immediate whose size follows the effective operand
 * size; in 64-bit mode a sign-extended 32-bit immediate is pushed.
 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        case IEMMODE_64BIT:
        {
            /* The immediate is 32 bits, sign-extended to 64 bits before pushing. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2910
2911
/**
 * @opcode 0x69
 *
 * IMUL Gv,Ev,Iz - three-operand signed multiply with a full-size immediate.
 * SF, ZF, AF and PF are undefined after the operation (CF/OF are set by
 * the assembly helper).
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            /* Select the EFLAGS-behavior-specific two-operand imul worker. */
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                /* Multiply into a local and store the product into Gv. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The '2' accounts for the 2-byte immediate following the ModRM bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The '4' accounts for the 4-byte immediate following the ModRM bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                /* The immediate is 32 bits, sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The '4' accounts for the 4-byte immediate following the ModRM bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
                IEM_MC_ASSIGN_U32_SX_U64(u64Src, u32Imm); /* parameter count for the threaded function for this block. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3074
3075
/**
 * @opcode 0x6a
 * PUSH Ib - push sign-extended byte immediate.
 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    /* Fetched signed so the push below receives the sign-extended value. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The effective operand size picks the width pushed onto the stack. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U16(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U32(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U64(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3112
3113
/**
 * @opcode 0x6b
 * IMUL Gv,Ev,Ib - three operand form: Gv = Ev * sign-extended byte immediate.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16Dst,                     0);
                /* Immediate is sign-extended to the operand size per the IMUL Gv,Ev,Ib definition. */
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ (int8_t)u8Imm,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                     2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                /* The two-operand imul worker is reused: r/m value goes into a
                   temporary which serves as destination, result is stored to Gv. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint16_t,    u16Src,     1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_LOCAL(uint16_t,  u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

                /* 3rd arg: one immediate byte still to be fetched after the ModR/M
                   bytes - presumably needed for RIP-relative addressing; confirm
                   against IEM_MC_CALC_RM_EFF_ADDR. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32Dst,                     0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ (int8_t)u8Imm,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                     2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                /* Note: storing via IEM_MC_STORE_GREG_U32 to the reg operand; high
                   dword clearing in 64-bit mode is handled by that store macro -
                   confirm against its definition. */
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint32_t,    u32Src,     1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_LOCAL(uint32_t,  u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64Dst,                     0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ (int8_t)u8Imm,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                     2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint64_t,    u64Src,     1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_LOCAL(uint64_t,  u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the */
                IEM_MC_ASSIGN_U8_SX_U64(u64Src, u8Imm);        /* parameter count for the threaded function for this block. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3276
3277
/**
 * @opcode 0x6c
 * INS Yb,DX - input byte(s) from port DX; deferred to a C implementation.
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REPZ and REPNZ are treated alike: either selects the repeating variant. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        /* Dispatch on effective address size; the trailing 'false' is presumably
           an fIoChecked argument (I/O permission not pre-checked) - confirm at callee. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                            iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                            iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                            iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                            iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                            iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                            iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3314
3315
/**
 * @opcode 0x6d
 * INS Yv,DX - input word/dword from port DX; deferred to a C implementation.
 * Note: the 64-bit operand size shares the op32 workers (I/O operand size
 * tops out at 32 bits, as the shared case labels below show).
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REPZ and REPNZ are treated alike: either selects the repeating variant. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                                    iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                                    iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                                    iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                                    iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                                    iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                                    iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3396
3397
/**
 * @opcode 0x6e
 * OUTS DX,Yb - output byte(s) to port DX; deferred to a C implementation.
 * Unlike INS, the source segment can be overridden, so iEffSeg is passed along.
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REPZ and REPNZ are treated alike: either selects the repeating variant. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3440
3441
/**
 * @opcode 0x6f
 * OUTS DX,Yv - output word/dword to port DX; deferred to a C implementation.
 * 64-bit operand size shares the op32 workers; the source segment can be
 * overridden, so iEffSeg is passed along.
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REPZ and REPNZ are treated alike: either selects the repeating variant. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3528
3529
/**
 * @opcode 0x70
 * JO rel8 - jump short if overflow (OF=1).
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3548
3549
/**
 * @opcode 0x71
 * JNO rel8 - jump short if not overflow (OF=0).  The flag test is not
 * inverted; instead the taken path sits in the ELSE arm.
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3568
/**
 * @opcode 0x72
 * JC/JB/JNAE rel8 - jump short if carry (CF=1).
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3587
3588
/**
 * @opcode 0x73
 * JNC/JNB/JAE rel8 - jump short if not carry (CF=0); taken path in the ELSE arm.
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3607
3608
/**
 * @opcode 0x74
 * JE/JZ rel8 - jump short if equal/zero (ZF=1).
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3627
3628
/**
 * @opcode 0x75
 * JNE/JNZ rel8 - jump short if not equal/not zero (ZF=0); taken path in the ELSE arm.
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3647
3648
/**
 * @opcode 0x76
 * JBE/JNA rel8 - jump short if below or equal (CF=1 or ZF=1).
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3667
3668
/**
 * @opcode 0x77
 * JA/JNBE rel8 - jump short if above (CF=0 and ZF=0); taken path in the ELSE arm.
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3687
3688
/**
 * @opcode 0x78
 * JS rel8 - jump short if sign (SF=1).
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3707
3708
/**
 * @opcode 0x79
 * JNS rel8 - jump short if not sign (SF=0); taken path in the ELSE arm.
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3727
3728
/**
 * @opcode 0x7a
 * JP/JPE rel8 - jump short if parity even (PF=1).
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3747
3748
/**
 * @opcode 0x7b
 * JNP/JPO rel8 - jump short if parity odd (PF=0); taken path in the ELSE arm.
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3767
3768
/**
 * @opcode 0x7c
 * JL/JNGE rel8 - jump short if less (signed: SF != OF).
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3787
3788
/**
 * @opcode 0x7d
 * JGE/JNL rel8 - jump short if greater or equal (signed: SF == OF); taken
 * path in the ELSE arm.
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3807
3808
/**
 * @opcode 0x7e
 * JLE/JNG rel8 - jump short if less or equal (signed: ZF=1 or SF != OF).
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3827
3828
/**
 * @opcode 0x7f
 * JG/JNLE rel8 - jump short if greater (signed: ZF=0 and SF == OF); taken
 * path in the ELSE arm.
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3847
3848
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Eb_Ib_80.
 *
 * Covers the register form and the non-LOCK memory form (read-modify-write
 * mapping).  Deliberately left open-ended: it ends inside an 'else {' so the
 * LOCK-prefixed memory path must be supplied by a following
 * IEMOP_BODY_BINARY_Eb_Ib_LOCKED (or .._NO_LOCK) invocation which closes the
 * braces.  Note the decode order in the memory case: effective address first
 * (with 1 immediate byte still pending), then the immediate, then
 * DONE_DECODING - do not reorder.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
3899
/**
 * Companion tail to IEMOP_BODY_BINARY_Eb_Ib_RW: the LOCK-prefixed memory
 * path.  Same RW mapping and decode order as the unlocked path, but calls the
 * locked (atomic) worker.  Closes the braces left open by the RW body.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
3923
/**
 * Read-only variant of IEMOP_BODY_BINARY_Eb_Ib_RW, for operations that only
 * read the destination (cmp): the memory operand is mapped read-only and only
 * EFLAGS are committed.  Like the RW variant it ends inside an 'else {' which
 * must be closed by IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
3970
/**
 * Companion tail to IEMOP_BODY_BINARY_Eb_Ib_RO for operations where the LOCK
 * prefix is invalid (cmp): raises \#UD.  Closes the braces left open by the
 * RO/RW body.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
3977
3978
3979
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /0
 * ADD Eb,Ib.  The RW body covers register and unlocked-memory forms; the
 * LOCKED body supplies the LOCK-prefixed memory form.
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_add_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
}
3990
3991
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /1
 * OR Eb,Ib.  RW body for register/unlocked memory, LOCKED body for LOCK-prefixed memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_or_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
}
4002
4003
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /2
 * ADC Eb,Ib.  RW body for register/unlocked memory, LOCKED body for LOCK-prefixed memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
}
4014
4015
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /3
 * SBB Eb,Ib.  RW body for register/unlocked memory, LOCKED body for LOCK-prefixed memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
}
4026
4027
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /4
 * AND Eb,Ib.  RW body for register/unlocked memory, LOCKED body for LOCK-prefixed memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_and_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
}
4038
4039
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /5
 * SUB Eb,Ib.  RW body for register/unlocked memory, LOCKED body for LOCK-prefixed memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
}
4050
4051
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /6
 * XOR Eb,Ib.  RW body for register/unlocked memory, LOCKED body for LOCK-prefixed memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
}
4062
4063
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /7
 * CMP Eb,Ib.  cmp only reads its operands, so the memory form uses the
 * read-only body, and a LOCK prefix raises \#UD via the NO_LOCK tail.
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
}
4074
4075
/**
 * @opcode 0x80
 * Group 1 byte-immediate dispatcher: the reg field of the ModR/M byte
 * selects the operation (/0 add ... /7 cmp).
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4095
4096
4097/**
4098 * Body for a group 1 binary operator.
4099 */
4100#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4101 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4102 { \
4103 /* register target */ \
4104 switch (pVCpu->iem.s.enmEffOpSize) \
4105 { \
4106 case IEMMODE_16BIT: \
4107 { \
4108 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4109 IEM_MC_BEGIN(3, 0); \
4110 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4111 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4112 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4113 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4114 \
4115 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4116 IEM_MC_REF_EFLAGS(pEFlags); \
4117 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4118 \
4119 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4120 IEM_MC_END(); \
4121 break; \
4122 } \
4123 \
4124 case IEMMODE_32BIT: \
4125 { \
4126 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4127 IEM_MC_BEGIN(3, 0); \
4128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4129 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4130 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4131 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4132 \
4133 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4134 IEM_MC_REF_EFLAGS(pEFlags); \
4135 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4136 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
4137 \
4138 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4139 IEM_MC_END(); \
4140 break; \
4141 } \
4142 \
4143 case IEMMODE_64BIT: \
4144 { \
4145 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4146 IEM_MC_BEGIN(3, 0); \
4147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4148 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4149 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4150 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4151 \
4152 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4153 IEM_MC_REF_EFLAGS(pEFlags); \
4154 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4155 \
4156 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4157 IEM_MC_END(); \
4158 break; \
4159 } \
4160 \
4161 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4162 } \
4163 } \
4164 else \
4165 { \
4166 /* memory target */ \
4167 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4168 { \
4169 switch (pVCpu->iem.s.enmEffOpSize) \
4170 { \
4171 case IEMMODE_16BIT: \
4172 { \
4173 IEM_MC_BEGIN(3, 3); \
4174 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4175 IEM_MC_ARG(uint16_t, u16Src, 1); \
4176 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4177 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4178 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4179 \
4180 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4181 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4182 IEM_MC_ASSIGN(u16Src, u16Imm); \
4183 IEMOP_HLP_DONE_DECODING(); \
4184 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4185 IEM_MC_FETCH_EFLAGS(EFlags); \
4186 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4187 \
4188 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
4189 IEM_MC_COMMIT_EFLAGS(EFlags); \
4190 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4191 IEM_MC_END(); \
4192 break; \
4193 } \
4194 \
4195 case IEMMODE_32BIT: \
4196 { \
4197 IEM_MC_BEGIN(3, 3); \
4198 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4199 IEM_MC_ARG(uint32_t, u32Src, 1); \
4200 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4202 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4203 \
4204 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4205 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4206 IEM_MC_ASSIGN(u32Src, u32Imm); \
4207 IEMOP_HLP_DONE_DECODING(); \
4208 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4209 IEM_MC_FETCH_EFLAGS(EFlags); \
4210 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4211 \
4212 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
4213 IEM_MC_COMMIT_EFLAGS(EFlags); \
4214 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4215 IEM_MC_END(); \
4216 break; \
4217 } \
4218 \
4219 case IEMMODE_64BIT: \
4220 { \
4221 IEM_MC_BEGIN(3, 3); \
4222 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4223 IEM_MC_ARG(uint64_t, u64Src, 1); \
4224 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4225 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4226 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4227 \
4228 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4229 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4230 IEMOP_HLP_DONE_DECODING(); \
4231 IEM_MC_ASSIGN(u64Src, u64Imm); \
4232 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4233 IEM_MC_FETCH_EFLAGS(EFlags); \
4234 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4235 \
4236 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
4237 IEM_MC_COMMIT_EFLAGS(EFlags); \
4238 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4239 IEM_MC_END(); \
4240 break; \
4241 } \
4242 \
4243 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4244 } \
4245 } \
4246 else \
4247 { \
4248 (void)0
/* This must be a separate macro due to parsing restrictions in IEMAllInstPython.py. */
/* Completes the dangling 'else' left open by IEMOP_BODY_BINARY_Ev_Iz_RW:
   the LOCK-prefixed memory destination path, calling the a_fnLockedUxx
   worker variants instead of the normal ones. */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4330
/* Read-only version of IEMOP_BODY_BINARY_Ev_Iz_RW (for CMP): the worker only
   updates EFLAGS, the destination is mapped read-only and never written back,
   and a LOCK prefix raises an invalid-lock-prefix exception.  Self-contained
   (no _LOCKED continuation needed). */
#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4484
4485
4486/**
4487 * @opmaps grp1_81
4488 * @opcode /0
4489 */
4490FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
4491{
4492 IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
4493 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
4494 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
4495}
4496
4497
4498/**
4499 * @opmaps grp1_81
4500 * @opcode /1
4501 */
4502FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
4503{
4504 IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
4505 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
4506 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
4507}
4508
4509
4510/**
4511 * @opmaps grp1_81
4512 * @opcode /2
4513 */
4514FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
4515{
4516 IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
4517 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
4518 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
4519}
4520
4521
4522/**
4523 * @opmaps grp1_81
4524 * @opcode /3
4525 */
4526FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
4527{
4528 IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
4529 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
4530 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
4531}
4532
4533
4534/**
4535 * @opmaps grp1_81
4536 * @opcode /4
4537 */
4538FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
4539{
4540 IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
4541 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
4542 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
4543}
4544
4545
4546/**
4547 * @opmaps grp1_81
4548 * @opcode /5
4549 */
4550FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
4551{
4552 IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
4553 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
4554 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
4555}
4556
4557
4558/**
4559 * @opmaps grp1_81
4560 * @opcode /6
4561 */
4562FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
4563{
4564 IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
4565 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
4566 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
4567}
4568
4569
4570/**
4571 * @opmaps grp1_81
4572 * @opcode /7
4573 */
4574FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
4575{
4576 IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
4577 IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
4578}
4579
4580
4581/**
4582 * @opcode 0x81
4583 */
4584FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
4585{
4586 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4587 switch (IEM_GET_MODRM_REG_8(bRm))
4588 {
4589 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
4590 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz, bRm);
4591 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
4592 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
4593 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
4594 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
4595 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
4596 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
4597 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4598 }
4599}
4600
4601
4602/**
4603 * @opcode 0x82
4604 * @opmnemonic grp1_82
4605 * @opgroup og_groups
4606 */
4607FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
4608{
4609 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
4610 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
4611}
4612
4613
4614/**
4615 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4616 * iemOp_Grp1_Ev_Ib.
4617 */
4618#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4619 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4620 { \
4621 /* \
4622 * Register target \
4623 */ \
4624 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4625 switch (pVCpu->iem.s.enmEffOpSize) \
4626 { \
4627 case IEMMODE_16BIT: \
4628 { \
4629 IEM_MC_BEGIN(3, 0); \
4630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4631 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4632 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
4633 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4634 \
4635 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4636 IEM_MC_REF_EFLAGS(pEFlags); \
4637 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4638 \
4639 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4640 IEM_MC_END(); \
4641 break; \
4642 } \
4643 \
4644 case IEMMODE_32BIT: \
4645 { \
4646 IEM_MC_BEGIN(3, 0); \
4647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4648 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4649 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
4650 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4651 \
4652 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4653 IEM_MC_REF_EFLAGS(pEFlags); \
4654 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4655 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
4656 \
4657 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4658 IEM_MC_END(); \
4659 break; \
4660 } \
4661 \
4662 case IEMMODE_64BIT: \
4663 { \
4664 IEM_MC_BEGIN(3, 0); \
4665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4666 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4667 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
4668 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4669 \
4670 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4671 IEM_MC_REF_EFLAGS(pEFlags); \
4672 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4673 \
4674 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4675 IEM_MC_END(); \
4676 break; \
4677 } \
4678 \
4679 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4680 } \
4681 } \
4682 else \
4683 { \
4684 /* \
4685 * Memory target. \
4686 */ \
4687 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4688 { \
4689 switch (pVCpu->iem.s.enmEffOpSize) \
4690 { \
4691 case IEMMODE_16BIT: \
4692 { \
4693 IEM_MC_BEGIN(3, 3); \
4694 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4695 IEM_MC_ARG(uint16_t, u16Src, 1); \
4696 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4697 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4698 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4699 \
4700 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4701 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4702 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
4703 IEMOP_HLP_DONE_DECODING(); \
4704 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4705 IEM_MC_FETCH_EFLAGS(EFlags); \
4706 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4707 \
4708 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
4709 IEM_MC_COMMIT_EFLAGS(EFlags); \
4710 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4711 IEM_MC_END(); \
4712 break; \
4713 } \
4714 \
4715 case IEMMODE_32BIT: \
4716 { \
4717 IEM_MC_BEGIN(3, 3); \
4718 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4719 IEM_MC_ARG(uint32_t, u32Src, 1); \
4720 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4721 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4722 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4723 \
4724 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4725 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4726 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
4727 IEMOP_HLP_DONE_DECODING(); \
4728 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4729 IEM_MC_FETCH_EFLAGS(EFlags); \
4730 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4731 \
4732 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
4733 IEM_MC_COMMIT_EFLAGS(EFlags); \
4734 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4735 IEM_MC_END(); \
4736 break; \
4737 } \
4738 \
4739 case IEMMODE_64BIT: \
4740 { \
4741 IEM_MC_BEGIN(3, 3); \
4742 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4743 IEM_MC_ARG(uint64_t, u64Src, 1); \
4744 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4745 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4746 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4747 \
4748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4749 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4750 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
4751 IEMOP_HLP_DONE_DECODING(); \
4752 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4753 IEM_MC_FETCH_EFLAGS(EFlags); \
4754 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4755 \
4756 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
4757 IEM_MC_COMMIT_EFLAGS(EFlags); \
4758 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4759 IEM_MC_END(); \
4760 break; \
4761 } \
4762 \
4763 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4764 } \
4765 } \
4766 else \
4767 { \
4768 (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/* Completes the dangling 'else' left open by IEMOP_BODY_BINARY_Ev_Ib_RW:
   the LOCK-prefixed memory destination path using the a_fnLockedUxx worker
   variants.  The byte immediate is sign-extended just like in the _RW body. */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4850
/* Read-only variant of IEMOP_BODY_BINARY_Ev_Ib_RW (for CMP): the worker only
   updates EFLAGS, the memory destination is mapped read-only, and a LOCK
   prefix raises an invalid-lock-prefix exception.  Self-contained (no
   _LOCKED continuation needed). */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
5006
5007/**
5008 * @opmaps grp1_83
5009 * @opcode /0
5010 */
5011FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
5012{
5013 IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
5014 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
5015 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
5016}
5017
5018
5019/**
5020 * @opmaps grp1_83
5021 * @opcode /1
5022 */
5023FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
5024{
5025 IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
5026 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
5027 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
5028}
5029
5030
5031/**
5032 * @opmaps grp1_83
5033 * @opcode /2
5034 */
5035FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
5036{
5037 IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
5038 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
5039 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
5040}
5041
5042
5043/**
5044 * @opmaps grp1_83
5045 * @opcode /3
5046 */
5047FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
5048{
5049 IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
5050 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
5051 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
5052}
5053
5054
5055/**
5056 * @opmaps grp1_83
5057 * @opcode /4
5058 */
5059FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
5060{
5061 IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
5062 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
5063 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
5064}
5065
5066
5067/**
5068 * @opmaps grp1_83
5069 * @opcode /5
5070 */
5071FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
5072{
5073 IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
5074 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
5075 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
5076}
5077
5078
5079/**
5080 * @opmaps grp1_83
5081 * @opcode /6
5082 */
5083FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
5084{
5085 IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
5086 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
5087 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
5088}
5089
5090
5091/**
5092 * @opmaps grp1_83
5093 * @opcode /7
5094 */
5095FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
5096{
5097 IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
5098 IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
5099}
5100
5101
5102/**
5103 * @opcode 0x83
5104 */
5105FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
5106{
5107 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
5108 to the 386 even if absent in the intel reference manuals and some
5109 3rd party opcode listings. */
5110 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5111 switch (IEM_GET_MODRM_REG_8(bRm))
5112 {
5113 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
5114 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib, bRm);
5115 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
5116 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
5117 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
5118 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
5119 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
5120 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
5121 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5122 }
5123}
5124
5125
5126/**
5127 * @opcode 0x84
5128 */
5129FNIEMOP_DEF(iemOp_test_Eb_Gb)
5130{
5131 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
5132 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5133 IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8);
5134 IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
5135}
5136
5137
5138/**
5139 * @opcode 0x85
5140 */
5141FNIEMOP_DEF(iemOp_test_Ev_Gv)
5142{
5143 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
5144 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5145 IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64);
5146}
5147
5148
5149/**
5150 * @opcode 0x86
5151 */
5152FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
5153{
5154 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5155 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
5156
5157 /*
5158 * If rm is denoting a register, no more instruction bytes.
5159 */
5160 if (IEM_IS_MODRM_REG_MODE(bRm))
5161 {
5162 IEM_MC_BEGIN(0, 2);
5163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5164 IEM_MC_LOCAL(uint8_t, uTmp1);
5165 IEM_MC_LOCAL(uint8_t, uTmp2);
5166
5167 IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5168 IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5169 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5170 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5171
5172 IEM_MC_ADVANCE_RIP_AND_FINISH();
5173 IEM_MC_END();
5174 }
5175 else
5176 {
5177 /*
5178 * We're accessing memory.
5179 */
5180 IEM_MC_BEGIN(2, 4);
5181 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5182 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
5183 IEM_MC_LOCAL(uint8_t, uTmpReg);
5184 IEM_MC_ARG(uint8_t *, pu8Mem, 0);
5185 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1);
5186
5187 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5189 IEM_MC_MEM_MAP_U8_RW(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5190 IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
5191 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5192 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
5193 else
5194 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
5195 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Mem, bUnmapInfo);
5196 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);
5197
5198 IEM_MC_ADVANCE_RIP_AND_FINISH();
5199 IEM_MC_END();
5200 }
5201}
5202
5203
5204/**
5205 * @opcode 0x87
5206 */
5207FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
5208{
5209 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
5210 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5211
5212 /*
5213 * If rm is denoting a register, no more instruction bytes.
5214 */
5215 if (IEM_IS_MODRM_REG_MODE(bRm))
5216 {
5217 switch (pVCpu->iem.s.enmEffOpSize)
5218 {
5219 case IEMMODE_16BIT:
5220 IEM_MC_BEGIN(0, 2);
5221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5222 IEM_MC_LOCAL(uint16_t, uTmp1);
5223 IEM_MC_LOCAL(uint16_t, uTmp2);
5224
5225 IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5226 IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5227 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5228 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5229
5230 IEM_MC_ADVANCE_RIP_AND_FINISH();
5231 IEM_MC_END();
5232 break;
5233
5234 case IEMMODE_32BIT:
5235 IEM_MC_BEGIN(0, 2);
5236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5237 IEM_MC_LOCAL(uint32_t, uTmp1);
5238 IEM_MC_LOCAL(uint32_t, uTmp2);
5239
5240 IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5241 IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5242 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5243 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5244
5245 IEM_MC_ADVANCE_RIP_AND_FINISH();
5246 IEM_MC_END();
5247 break;
5248
5249 case IEMMODE_64BIT:
5250 IEM_MC_BEGIN(0, 2);
5251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5252 IEM_MC_LOCAL(uint64_t, uTmp1);
5253 IEM_MC_LOCAL(uint64_t, uTmp2);
5254
5255 IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5256 IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5257 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5258 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5259
5260 IEM_MC_ADVANCE_RIP_AND_FINISH();
5261 IEM_MC_END();
5262 break;
5263
5264 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5265 }
5266 }
5267 else
5268 {
5269 /*
5270 * We're accessing memory.
5271 */
5272 switch (pVCpu->iem.s.enmEffOpSize)
5273 {
5274/** @todo the register must be committed separately! */
5275 case IEMMODE_16BIT:
5276 IEM_MC_BEGIN(2, 2);
5277 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
5278 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
5279 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5280
5281 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5282 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5283 IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
5284 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
5285 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5286 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
5287 else
5288 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
5289 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
5290
5291 IEM_MC_ADVANCE_RIP_AND_FINISH();
5292 IEM_MC_END();
5293 break;
5294
5295 case IEMMODE_32BIT:
5296 IEM_MC_BEGIN(2, 2);
5297 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
5298 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
5299 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5300
5301 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5303 IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
5304 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
5305 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5306 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
5307 else
5308 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
5309 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
5310
5311 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
5312 IEM_MC_ADVANCE_RIP_AND_FINISH();
5313 IEM_MC_END();
5314 break;
5315
5316 case IEMMODE_64BIT:
5317 IEM_MC_BEGIN(2, 2);
5318 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
5319 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
5320 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5321
5322 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5324 IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
5325 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
5326 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5327 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
5328 else
5329 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
5330 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
5331
5332 IEM_MC_ADVANCE_RIP_AND_FINISH();
5333 IEM_MC_END();
5334 break;
5335
5336 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5337 }
5338 }
5339}
5340
5341
5342/**
5343 * @opcode 0x88
5344 */
5345FNIEMOP_DEF(iemOp_mov_Eb_Gb)
5346{
5347 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
5348
5349 uint8_t bRm;
5350 IEM_OPCODE_GET_NEXT_U8(&bRm);
5351
5352 /*
5353 * If rm is denoting a register, no more instruction bytes.
5354 */
5355 if (IEM_IS_MODRM_REG_MODE(bRm))
5356 {
5357 IEM_MC_BEGIN(0, 1);
5358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5359 IEM_MC_LOCAL(uint8_t, u8Value);
5360 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5361 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
5362 IEM_MC_ADVANCE_RIP_AND_FINISH();
5363 IEM_MC_END();
5364 }
5365 else
5366 {
5367 /*
5368 * We're writing a register to memory.
5369 */
5370 IEM_MC_BEGIN(0, 2);
5371 IEM_MC_LOCAL(uint8_t, u8Value);
5372 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5373 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5375 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5376 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
5377 IEM_MC_ADVANCE_RIP_AND_FINISH();
5378 IEM_MC_END();
5379 }
5380}
5381
5382
5383/**
5384 * @opcode 0x89
5385 */
5386FNIEMOP_DEF(iemOp_mov_Ev_Gv)
5387{
5388 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
5389
5390 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5391
5392 /*
5393 * If rm is denoting a register, no more instruction bytes.
5394 */
5395 if (IEM_IS_MODRM_REG_MODE(bRm))
5396 {
5397 switch (pVCpu->iem.s.enmEffOpSize)
5398 {
5399 case IEMMODE_16BIT:
5400 IEM_MC_BEGIN(0, 1);
5401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5402 IEM_MC_LOCAL(uint16_t, u16Value);
5403 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5404 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
5405 IEM_MC_ADVANCE_RIP_AND_FINISH();
5406 IEM_MC_END();
5407 break;
5408
5409 case IEMMODE_32BIT:
5410 IEM_MC_BEGIN(0, 1);
5411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5412 IEM_MC_LOCAL(uint32_t, u32Value);
5413 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5414 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
5415 IEM_MC_ADVANCE_RIP_AND_FINISH();
5416 IEM_MC_END();
5417 break;
5418
5419 case IEMMODE_64BIT:
5420 IEM_MC_BEGIN(0, 1);
5421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5422 IEM_MC_LOCAL(uint64_t, u64Value);
5423 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5424 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
5425 IEM_MC_ADVANCE_RIP_AND_FINISH();
5426 IEM_MC_END();
5427 break;
5428
5429 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5430 }
5431 }
5432 else
5433 {
5434 /*
5435 * We're writing a register to memory.
5436 */
5437 switch (pVCpu->iem.s.enmEffOpSize)
5438 {
5439 case IEMMODE_16BIT:
5440 IEM_MC_BEGIN(0, 2);
5441 IEM_MC_LOCAL(uint16_t, u16Value);
5442 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5443 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5445 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5446 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
5447 IEM_MC_ADVANCE_RIP_AND_FINISH();
5448 IEM_MC_END();
5449 break;
5450
5451 case IEMMODE_32BIT:
5452 IEM_MC_BEGIN(0, 2);
5453 IEM_MC_LOCAL(uint32_t, u32Value);
5454 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5455 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5457 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5458 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
5459 IEM_MC_ADVANCE_RIP_AND_FINISH();
5460 IEM_MC_END();
5461 break;
5462
5463 case IEMMODE_64BIT:
5464 IEM_MC_BEGIN(0, 2);
5465 IEM_MC_LOCAL(uint64_t, u64Value);
5466 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5467 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5469 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5470 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
5471 IEM_MC_ADVANCE_RIP_AND_FINISH();
5472 IEM_MC_END();
5473 break;
5474
5475 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5476 }
5477 }
5478}
5479
5480
5481/**
5482 * @opcode 0x8a
5483 */
5484FNIEMOP_DEF(iemOp_mov_Gb_Eb)
5485{
5486 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
5487
5488 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5489
5490 /*
5491 * If rm is denoting a register, no more instruction bytes.
5492 */
5493 if (IEM_IS_MODRM_REG_MODE(bRm))
5494 {
5495 IEM_MC_BEGIN(0, 1);
5496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5497 IEM_MC_LOCAL(uint8_t, u8Value);
5498 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5499 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
5500 IEM_MC_ADVANCE_RIP_AND_FINISH();
5501 IEM_MC_END();
5502 }
5503 else
5504 {
5505 /*
5506 * We're loading a register from memory.
5507 */
5508 IEM_MC_BEGIN(0, 2);
5509 IEM_MC_LOCAL(uint8_t, u8Value);
5510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5511 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5513 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5514 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
5515 IEM_MC_ADVANCE_RIP_AND_FINISH();
5516 IEM_MC_END();
5517 }
5518}
5519
5520
5521/**
5522 * @opcode 0x8b
5523 */
5524FNIEMOP_DEF(iemOp_mov_Gv_Ev)
5525{
5526 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
5527
5528 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5529
5530 /*
5531 * If rm is denoting a register, no more instruction bytes.
5532 */
5533 if (IEM_IS_MODRM_REG_MODE(bRm))
5534 {
5535 switch (pVCpu->iem.s.enmEffOpSize)
5536 {
5537 case IEMMODE_16BIT:
5538 IEM_MC_BEGIN(0, 1);
5539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5540 IEM_MC_LOCAL(uint16_t, u16Value);
5541 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5542 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
5543 IEM_MC_ADVANCE_RIP_AND_FINISH();
5544 IEM_MC_END();
5545 break;
5546
5547 case IEMMODE_32BIT:
5548 IEM_MC_BEGIN(0, 1);
5549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5550 IEM_MC_LOCAL(uint32_t, u32Value);
5551 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5552 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
5553 IEM_MC_ADVANCE_RIP_AND_FINISH();
5554 IEM_MC_END();
5555 break;
5556
5557 case IEMMODE_64BIT:
5558 IEM_MC_BEGIN(0, 1);
5559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5560 IEM_MC_LOCAL(uint64_t, u64Value);
5561 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5562 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
5563 IEM_MC_ADVANCE_RIP_AND_FINISH();
5564 IEM_MC_END();
5565 break;
5566
5567 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5568 }
5569 }
5570 else
5571 {
5572 /*
5573 * We're loading a register from memory.
5574 */
5575 switch (pVCpu->iem.s.enmEffOpSize)
5576 {
5577 case IEMMODE_16BIT:
5578 IEM_MC_BEGIN(0, 2);
5579 IEM_MC_LOCAL(uint16_t, u16Value);
5580 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5581 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5583 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5584 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
5585 IEM_MC_ADVANCE_RIP_AND_FINISH();
5586 IEM_MC_END();
5587 break;
5588
5589 case IEMMODE_32BIT:
5590 IEM_MC_BEGIN(0, 2);
5591 IEM_MC_LOCAL(uint32_t, u32Value);
5592 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5593 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5595 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5596 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
5597 IEM_MC_ADVANCE_RIP_AND_FINISH();
5598 IEM_MC_END();
5599 break;
5600
5601 case IEMMODE_64BIT:
5602 IEM_MC_BEGIN(0, 2);
5603 IEM_MC_LOCAL(uint64_t, u64Value);
5604 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5605 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5607 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5608 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
5609 IEM_MC_ADVANCE_RIP_AND_FINISH();
5610 IEM_MC_END();
5611 break;
5612
5613 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5614 }
5615 }
5616}
5617
5618
5619/**
5620 * opcode 0x63
5621 * @todo Table fixme
5622 */
5623FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
5624{
5625 if (!IEM_IS_64BIT_CODE(pVCpu))
5626 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
5627 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
5628 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
5629 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
5630}
5631
5632
5633/**
5634 * @opcode 0x8c
5635 */
5636FNIEMOP_DEF(iemOp_mov_Ev_Sw)
5637{
5638 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
5639
5640 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5641
5642 /*
5643 * Check that the destination register exists. The REX.R prefix is ignored.
5644 */
5645 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5646 if (iSegReg > X86_SREG_GS)
5647 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5648
5649 /*
5650 * If rm is denoting a register, no more instruction bytes.
5651 * In that case, the operand size is respected and the upper bits are
5652 * cleared (starting with some pentium).
5653 */
5654 if (IEM_IS_MODRM_REG_MODE(bRm))
5655 {
5656 switch (pVCpu->iem.s.enmEffOpSize)
5657 {
5658 case IEMMODE_16BIT:
5659 IEM_MC_BEGIN(0, 1);
5660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5661 IEM_MC_LOCAL(uint16_t, u16Value);
5662 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5663 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
5664 IEM_MC_ADVANCE_RIP_AND_FINISH();
5665 IEM_MC_END();
5666 break;
5667
5668 case IEMMODE_32BIT:
5669 IEM_MC_BEGIN(0, 1);
5670 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5671 IEM_MC_LOCAL(uint32_t, u32Value);
5672 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
5673 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
5674 IEM_MC_ADVANCE_RIP_AND_FINISH();
5675 IEM_MC_END();
5676 break;
5677
5678 case IEMMODE_64BIT:
5679 IEM_MC_BEGIN(0, 1);
5680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5681 IEM_MC_LOCAL(uint64_t, u64Value);
5682 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
5683 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
5684 IEM_MC_ADVANCE_RIP_AND_FINISH();
5685 IEM_MC_END();
5686 break;
5687
5688 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5689 }
5690 }
5691 else
5692 {
5693 /*
5694 * We're saving the register to memory. The access is word sized
5695 * regardless of operand size prefixes.
5696 */
5697#if 0 /* not necessary */
5698 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5699#endif
5700 IEM_MC_BEGIN(0, 2);
5701 IEM_MC_LOCAL(uint16_t, u16Value);
5702 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5703 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5705 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5706 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
5707 IEM_MC_ADVANCE_RIP_AND_FINISH();
5708 IEM_MC_END();
5709 }
5710}
5711
5712
5713
5714
5715/**
5716 * @opcode 0x8d
5717 */
5718FNIEMOP_DEF(iemOp_lea_Gv_M)
5719{
5720 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
5721 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5722 if (IEM_IS_MODRM_REG_MODE(bRm))
5723 IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */
5724
5725 switch (pVCpu->iem.s.enmEffOpSize)
5726 {
5727 case IEMMODE_16BIT:
5728 IEM_MC_BEGIN(0, 2);
5729 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5730 IEM_MC_LOCAL(uint16_t, u16Cast);
5731 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5733 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
5734 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
5735 IEM_MC_ADVANCE_RIP_AND_FINISH();
5736 IEM_MC_END();
5737 break;
5738
5739 case IEMMODE_32BIT:
5740 IEM_MC_BEGIN(0, 2);
5741 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5742 IEM_MC_LOCAL(uint32_t, u32Cast);
5743 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5745 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
5746 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
5747 IEM_MC_ADVANCE_RIP_AND_FINISH();
5748 IEM_MC_END();
5749 break;
5750
5751 case IEMMODE_64BIT:
5752 IEM_MC_BEGIN(0, 1);
5753 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5754 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5756 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
5757 IEM_MC_ADVANCE_RIP_AND_FINISH();
5758 IEM_MC_END();
5759 break;
5760
5761 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5762 }
5763}
5764
5765
5766/**
5767 * @opcode 0x8e
5768 */
5769FNIEMOP_DEF(iemOp_mov_Sw_Ev)
5770{
5771 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
5772
5773 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5774
5775 /*
5776 * The practical operand size is 16-bit.
5777 */
5778#if 0 /* not necessary */
5779 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5780#endif
5781
5782 /*
5783 * Check that the destination register exists and can be used with this
5784 * instruction. The REX.R prefix is ignored.
5785 */
5786 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5787 /** @todo r=bird: What does 8086 do here wrt CS? */
5788 if ( iSegReg == X86_SREG_CS
5789 || iSegReg > X86_SREG_GS)
5790 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5791
5792 /*
5793 * If rm is denoting a register, no more instruction bytes.
5794 */
5795 if (IEM_IS_MODRM_REG_MODE(bRm))
5796 {
5797 IEM_MC_BEGIN(2, 0);
5798 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5799 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
5800 IEM_MC_ARG(uint16_t, u16Value, 1);
5801 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5802 if (iSRegArg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
5803 IEM_MC_CALL_CIMPL_2( 0, iemCImpl_load_SReg, iSRegArg, u16Value);
5804 else
5805 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE, iemCImpl_load_SReg, iSRegArg, u16Value);
5806 IEM_MC_END();
5807 }
5808 else
5809 {
5810 /*
5811 * We're loading the register from memory. The access is word sized
5812 * regardless of operand size prefixes.
5813 */
5814 IEM_MC_BEGIN(2, 1);
5815 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
5816 IEM_MC_ARG(uint16_t, u16Value, 1);
5817 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5818 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5820 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5821 if (iSRegArg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
5822 IEM_MC_CALL_CIMPL_2( 0, iemCImpl_load_SReg, iSRegArg, u16Value);
5823 else
5824 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE, iemCImpl_load_SReg, iSRegArg, u16Value);
5825 IEM_MC_END();
5826 }
5827}
5828
5829
/** Opcode 0x8f /0: 'pop Ev' - pop a word/dword/qword off the stack into an
 *  r/m destination.  The tricky bit is that Intel specifies RSP to be
 *  incremented BEFORE it is used in the effective address calculation, which
 *  the memory paths handle by biasing the address calculation (the
 *  'cb << 8' argument to IEM_MC_CALC_RM_EFF_ADDR). */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        /* NOTE(review): no 'break' after IEM_MC_END() in these cases - the
           IEM_MC_CALL_CIMPL_2 invocation appears to terminate the function
           (the other IEM_MC blocks in this file that fall off the case end
           with IEM_MC_ADVANCE_RIP_AND_FINISH + break instead) - confirm
           against the IEM_MC_CALL_CIMPL_2 definition. */
        case IEMMODE_16BIT:
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
        }

        case IEMMODE_32BIT:
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
        }

        case IEMMODE_64BIT:
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
5967
5968
5969/**
5970 * @opcode 0x8f
5971 */
5972FNIEMOP_DEF(iemOp_Grp1A__xop)
5973{
5974 /*
5975 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
5976 * three byte VEX prefix, except that the mmmmm field cannot have the values
5977 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
5978 */
5979 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5980 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
5981 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
5982
5983 IEMOP_MNEMONIC(xop, "xop");
5984 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
5985 {
5986 /** @todo Test when exctly the XOP conformance checks kick in during
5987 * instruction decoding and fetching (using \#PF). */
5988 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
5989 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
5990 if ( ( pVCpu->iem.s.fPrefixes
5991 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
5992 == 0)
5993 {
5994 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
5995 if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
5996 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
5997 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
5998 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
5999 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
6000 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
6001 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
6002 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
6003
6004 /** @todo XOP: Just use new tables and decoders. */
6005 switch (bRm & 0x1f)
6006 {
6007 case 8: /* xop opcode map 8. */
6008 IEMOP_BITCH_ABOUT_STUB();
6009 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6010
6011 case 9: /* xop opcode map 9. */
6012 IEMOP_BITCH_ABOUT_STUB();
6013 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6014
6015 case 10: /* xop opcode map 10. */
6016 IEMOP_BITCH_ABOUT_STUB();
6017 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6018
6019 default:
6020 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
6021 IEMOP_RAISE_INVALID_OPCODE_RET();
6022 }
6023 }
6024 else
6025 Log(("XOP: Invalid prefix mix!\n"));
6026 }
6027 else
6028 Log(("XOP: XOP support disabled!\n"));
6029 IEMOP_RAISE_INVALID_OPCODE_RET();
6030}
6031
6032
6033/**
6034 * Common 'xchg reg,rAX' helper.
6035 */
6036FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
6037{
6038 iReg |= pVCpu->iem.s.uRexB;
6039 switch (pVCpu->iem.s.enmEffOpSize)
6040 {
6041 case IEMMODE_16BIT:
6042 IEM_MC_BEGIN(0, 2);
6043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6044 IEM_MC_LOCAL(uint16_t, u16Tmp1);
6045 IEM_MC_LOCAL(uint16_t, u16Tmp2);
6046 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
6047 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
6048 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
6049 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
6050 IEM_MC_ADVANCE_RIP_AND_FINISH();
6051 IEM_MC_END();
6052 break;
6053
6054 case IEMMODE_32BIT:
6055 IEM_MC_BEGIN(0, 2);
6056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6057 IEM_MC_LOCAL(uint32_t, u32Tmp1);
6058 IEM_MC_LOCAL(uint32_t, u32Tmp2);
6059 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
6060 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
6061 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
6062 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
6063 IEM_MC_ADVANCE_RIP_AND_FINISH();
6064 IEM_MC_END();
6065 break;
6066
6067 case IEMMODE_64BIT:
6068 IEM_MC_BEGIN(0, 2);
6069 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6070 IEM_MC_LOCAL(uint64_t, u64Tmp1);
6071 IEM_MC_LOCAL(uint64_t, u64Tmp2);
6072 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
6073 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
6074 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
6075 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
6076 IEM_MC_ADVANCE_RIP_AND_FINISH();
6077 IEM_MC_END();
6078 break;
6079
6080 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6081 }
6082}
6083
6084
6085/**
6086 * @opcode 0x90
6087 */
6088FNIEMOP_DEF(iemOp_nop)
6089{
6090 /* R8/R8D and RAX/EAX can be exchanged. */
6091 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
6092 {
6093 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
6094 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
6095 }
6096
6097 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
6098 {
6099 IEMOP_MNEMONIC(pause, "pause");
6100 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
6101 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
6102 if (!IEM_IS_IN_GUEST(pVCpu))
6103 { /* probable */ }
6104#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6105 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
6106 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmx_pause);
6107#endif
6108#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
6109 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
6110 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_svm_pause);
6111#endif
6112 }
6113 else
6114 IEMOP_MNEMONIC(nop, "nop");
6115 /** @todo testcase: lock nop; lock pause */
6116 IEM_MC_BEGIN(0, 0);
6117 IEMOP_HLP_DONE_DECODING();
6118 IEM_MC_ADVANCE_RIP_AND_FINISH();
6119 IEM_MC_END();
6120}
6121
6122
/**
 * @opcode 0x91
 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    /* Defer to the common xchg reg,rAX worker (handles all operand sizes). */
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
6131
6132
/**
 * @opcode 0x92
 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    /* Defer to the common xchg reg,rAX worker (handles all operand sizes). */
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
6141
6142
/**
 * @opcode 0x93
 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    /* Defer to the common xchg reg,rAX worker (handles all operand sizes). */
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
6151
6152
6153/**
6154 * @opcode 0x94
6155 */
6156FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6157{
6158 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6159 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6160}
6161
6162
/**
 * @opcode 0x95
 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    /* Defer to the common xchg reg,rAX worker (handles all operand sizes). */
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
6171
6172
/**
 * @opcode 0x96
 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    /* Defer to the common xchg reg,rAX worker (handles all operand sizes). */
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
6181
6182
/**
 * @opcode 0x97
 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    /* Defer to the common xchg reg,rAX worker (handles all operand sizes). */
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
6191
6192
/**
 * @opcode 0x98
 *
 * CBW/CWDE/CDQE - sign-extend AL->AX / AX->EAX / EAX->RAX depending on the
 * effective operand size.  Implemented by testing the source's sign bit and
 * then OR'ing in or AND'ing off the upper half.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) { /* sign bit of AL */
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) { /* sign bit of AX */
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) { /* sign bit of EAX */
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6242
6243
/**
 * @opcode 0x99
 *
 * CWD/CDQ/CQO - sign-extend rAX into rDX: rDX is set to all-ones when the
 * sign bit of AX/EAX/RAX is set, otherwise to zero.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) { /* sign bit of AX */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) { /* sign bit of EAX */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) { /* sign bit of RAX */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6293
6294
/**
 * @opcode 0x9a
 *
 * Direct far call with an immediate seg:off pointer.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT(); /* this encoding raises #UD in long mode */

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg); /* 16-bit offset, zero extended */
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                                iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
}
6315
6316
/** Opcode 0x9b. (aka fwait)
 *
 * Checks for pending x87 exceptions / device-not-available conditions and
 * raises them if necessary; otherwise does nothing.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6328
6329
/**
 * @opcode 0x9c
 *
 * Deferred entirely to the C implementation (privilege/IOPL checks needed).
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode: operand size defaults to 64-bit */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
6340
6341
/**
 * @opcode 0x9d
 *
 * Deferred entirely to the C implementation; modifies RFLAGS, so IRQ
 * delivery must be re-checked around it.
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_MNEMONIC(popf_Fv, "popf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode: operand size defaults to 64-bit */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
                                iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
6353
6354
/**
 * @opcode 0x9e
 *
 * Loads SF, ZF, AF, PF and CF from AH into EFLAGS.  In 64-bit mode this is
 * only valid when the CPU reports LAHF/SAHF support (CPUID).
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 2);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/); /* AH encodes as SP without REX */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00)); /* keep the non-AH flag bits */
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1); /* bit 1 of EFLAGS is always set */
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6378
6379
/**
 * @opcode 0x9f
 *
 * Loads the low byte of EFLAGS into AH.  In 64-bit mode this is only valid
 * when the CPU reports LAHF/SAHF support (CPUID).
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags); /* AH encodes as SP without REX */
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6397
6398
/**
 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
 * The offset width follows the effective address size; 16- and 32-bit
 * offsets are zero-extended to 64 bits.
 * Will return/throw on failures.
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } while (0)
6422
/**
 * @opcode 0xa0
 *
 * mov AL, moffs8 - loads AL from the immediate memory offset (default or
 * overridden segment).
 */
FNIEMOP_DEF(iemOp_mov_AL_Ob)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0,1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6446
6447
/**
 * @opcode 0xa1
 *
 * mov rAX, moffs - loads AX/EAX/RAX from the immediate memory offset,
 * selecting the width from the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6498
6499
/**
 * @opcode 0xa2
 *
 * mov moffs8, AL - stores AL to the immediate memory offset (default or
 * overridden segment).
 */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0,1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6523
6524
/**
 * @opcode 0xa3
 *
 * mov moffs, rAX - stores AX/EAX/RAX to the immediate memory offset,
 * selecting the width from the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6575
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Copies one element from [iEffSeg:rSI] to [ES:rDI] and steps both index
 * registers by the element size, down when EFLAGS.DF is set, up otherwise. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR,  uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \

/**
 * @opcode 0xa4
 *
 * movsb - REP/REPNE forms go to the C implementation; the plain form is
 * expanded per address size via IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6629
6630
/**
 * @opcode 0xa5
 *
 * movsw/movsd/movsq - REP forms defer to per-op/addr-size C workers; the
 * plain forms expand IEM_MOVS_CASE for each operand/address size pairing.
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* NOTE(review): no 'break' here; unreachable since every case above returns. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* 64-bit op + 16-bit addressing cannot be encoded */
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6714
6715#undef IEM_MOVS_CASE
6716
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 * Compares [iEffSeg:rSI] (first operand) against [ES:rDI] (second operand)
 * via the cmp arithmetic worker, updating EFLAGS, then steps both index
 * registers by the element size according to EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 3); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
    IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
    IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_LOCAL(RTGCPTR,  uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
    IEM_MC_REF_LOCAL(puValue1, uValue1); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \

/**
 * @opcode 0xa6
 *
 * cmpsb - REPE and REPNE forms go to separate C workers (they terminate on
 * opposite ZF conditions); the plain form expands IEM_CMPS_CASE.
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6791
6792
/**
 * @opcode 0xa7
 *
 * cmpsw/cmpsd/cmpsq - REPE and REPNE forms defer to per-op/addr-size C
 * workers; plain forms expand IEM_CMPS_CASE for each size pairing.
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* NOTE(review): no 'break' here; unreachable since every case above returns. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4); /* 64-bit op + 16-bit addressing cannot be encoded */
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* NOTE(review): no 'break' here; unreachable since every case above returns. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2); /* 64-bit op + 16-bit addressing cannot be encoded */
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6910
6911#undef IEM_CMPS_CASE
6912
/**
 * @opcode 0xa8
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8); /* shared AL,Ib binary-op decoder body */
}
6922
6923
/**
 * @opcode 0xa9
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST */
    /* Shared rAX,Iz binary-op decoder body; workers for all three operand sizes. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
}
6933
6934
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 * Stores AL/AX/EAX/RAX to [ES:rDI] and steps rDI by the element size,
 * down when EFLAGS.DF is set, up otherwise. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR,  uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr,  X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \

/**
 * @opcode 0xaa
 *
 * stosb - REP form goes to the C implementation; the plain form is expanded
 * per address size via IEM_STOS_CASE.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6985
6986
6987/**
6988 * @opcode 0xab
6989 */
6990FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
6991{
6992 /*
6993 * Use the C implementation if a repeat prefix is encountered.
6994 */
6995 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6996 {
6997 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
6998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6999 switch (pVCpu->iem.s.enmEffOpSize)
7000 {
7001 case IEMMODE_16BIT:
7002 switch (pVCpu->iem.s.enmEffAddrMode)
7003 {
7004 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m16);
7005 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m32);
7006 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m64);
7007 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7008 }
7009 break;
7010 case IEMMODE_32BIT:
7011 switch (pVCpu->iem.s.enmEffAddrMode)
7012 {
7013 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m16);
7014 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m32);
7015 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m64);
7016 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7017 }
7018 case IEMMODE_64BIT:
7019 switch (pVCpu->iem.s.enmEffAddrMode)
7020 {
7021 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7022 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m32);
7023 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m64);
7024 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7025 }
7026 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7027 }
7028 }
7029
7030 /*
7031 * Annoying double switch here.
7032 * Using ugly macro for implementing the cases, sharing it with stosb.
7033 */
7034 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7035 switch (pVCpu->iem.s.enmEffOpSize)
7036 {
7037 case IEMMODE_16BIT:
7038 switch (pVCpu->iem.s.enmEffAddrMode)
7039 {
7040 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
7041 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
7042 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
7043 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7044 }
7045 break;
7046
7047 case IEMMODE_32BIT:
7048 switch (pVCpu->iem.s.enmEffAddrMode)
7049 {
7050 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
7051 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
7052 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
7053 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7054 }
7055 break;
7056
7057 case IEMMODE_64BIT:
7058 switch (pVCpu->iem.s.enmEffAddrMode)
7059 {
7060 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7061 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
7062 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
7063 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7064 }
7065 break;
7066 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7067 }
7068}
7069
7070#undef IEM_STOS_CASE
7071
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the non-repeated LODS body for operand width ValBits and effective
 * address width AddrBits: fetches the value at [iEffSeg:xSI] into a local,
 * stores it into the accumulator (xAX), then steps xSI down or up by the
 * operand size depending on EFLAGS.DF. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        /* xSI is zero-extended to 64 bits for the effective address. */ \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        /* DF set -> walk xSI downwards, otherwise upwards. */ \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
7088
7089/**
7090 * @opcode 0xac
7091 */
7092FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
7093{
7094 /*
7095 * Use the C implementation if a repeat prefix is encountered.
7096 */
7097 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7098 {
7099 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
7100 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7101 switch (pVCpu->iem.s.enmEffAddrMode)
7102 {
7103 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
7104 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
7105 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
7106 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7107 }
7108 }
7109
7110 /*
7111 * Sharing case implementation with stos[wdq] below.
7112 */
7113 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
7114 switch (pVCpu->iem.s.enmEffAddrMode)
7115 {
7116 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
7117 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
7118 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
7119 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7120 }
7121}
7122
7123
7124/**
7125 * @opcode 0xad
7126 */
7127FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
7128{
7129 /*
7130 * Use the C implementation if a repeat prefix is encountered.
7131 */
7132 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7133 {
7134 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
7135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7136 switch (pVCpu->iem.s.enmEffOpSize)
7137 {
7138 case IEMMODE_16BIT:
7139 switch (pVCpu->iem.s.enmEffAddrMode)
7140 {
7141 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
7142 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
7143 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
7144 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7145 }
7146 break;
7147 case IEMMODE_32BIT:
7148 switch (pVCpu->iem.s.enmEffAddrMode)
7149 {
7150 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
7151 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
7152 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
7153 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7154 }
7155 case IEMMODE_64BIT:
7156 switch (pVCpu->iem.s.enmEffAddrMode)
7157 {
7158 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
7159 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
7160 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
7161 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7162 }
7163 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7164 }
7165 }
7166
7167 /*
7168 * Annoying double switch here.
7169 * Using ugly macro for implementing the cases, sharing it with lodsb.
7170 */
7171 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
7172 switch (pVCpu->iem.s.enmEffOpSize)
7173 {
7174 case IEMMODE_16BIT:
7175 switch (pVCpu->iem.s.enmEffAddrMode)
7176 {
7177 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
7178 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
7179 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
7180 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7181 }
7182 break;
7183
7184 case IEMMODE_32BIT:
7185 switch (pVCpu->iem.s.enmEffAddrMode)
7186 {
7187 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
7188 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
7189 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
7190 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7191 }
7192 break;
7193
7194 case IEMMODE_64BIT:
7195 switch (pVCpu->iem.s.enmEffAddrMode)
7196 {
7197 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7198 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
7199 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
7200 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7201 }
7202 break;
7203 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7204 }
7205}
7206
7207#undef IEM_LODS_CASE
7208
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the non-repeated SCAS body for operand width ValBits and address
 * width AddrBits: compares the accumulator with the value at [ES:xDI]
 * (segment is fixed to ES, no override) via the CMP worker, which updates
 * EFLAGS, then steps xDI by the operand size according to EFLAGS.DF. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        /* xDI is zero-extended to 64 bits for the effective address. */ \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        /* DF set -> walk xDI downwards, otherwise upwards. */ \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END();
7231
7232/**
7233 * @opcode 0xae
7234 */
7235FNIEMOP_DEF(iemOp_scasb_AL_Xb)
7236{
7237 /*
7238 * Use the C implementation if a repeat prefix is encountered.
7239 */
7240 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7241 {
7242 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
7243 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7244 switch (pVCpu->iem.s.enmEffAddrMode)
7245 {
7246 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m16);
7247 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m32);
7248 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m64);
7249 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7250 }
7251 }
7252 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7253 {
7254 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
7255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7256 switch (pVCpu->iem.s.enmEffAddrMode)
7257 {
7258 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m16);
7259 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m32);
7260 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m64);
7261 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7262 }
7263 }
7264
7265 /*
7266 * Sharing case implementation with stos[wdq] below.
7267 */
7268 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
7269 switch (pVCpu->iem.s.enmEffAddrMode)
7270 {
7271 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
7272 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
7273 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
7274 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7275 }
7276}
7277
7278
7279/**
7280 * @opcode 0xaf
7281 */
7282FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
7283{
7284 /*
7285 * Use the C implementation if a repeat prefix is encountered.
7286 */
7287 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7288 {
7289 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
7290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7291 switch (pVCpu->iem.s.enmEffOpSize)
7292 {
7293 case IEMMODE_16BIT:
7294 switch (pVCpu->iem.s.enmEffAddrMode)
7295 {
7296 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m16);
7297 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m32);
7298 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m64);
7299 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7300 }
7301 break;
7302 case IEMMODE_32BIT:
7303 switch (pVCpu->iem.s.enmEffAddrMode)
7304 {
7305 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m16);
7306 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m32);
7307 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m64);
7308 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7309 }
7310 case IEMMODE_64BIT:
7311 switch (pVCpu->iem.s.enmEffAddrMode)
7312 {
7313 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
7314 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m32);
7315 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m64);
7316 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7317 }
7318 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7319 }
7320 }
7321 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7322 {
7323 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
7324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7325 switch (pVCpu->iem.s.enmEffOpSize)
7326 {
7327 case IEMMODE_16BIT:
7328 switch (pVCpu->iem.s.enmEffAddrMode)
7329 {
7330 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m16);
7331 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m32);
7332 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m64);
7333 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7334 }
7335 break;
7336 case IEMMODE_32BIT:
7337 switch (pVCpu->iem.s.enmEffAddrMode)
7338 {
7339 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m16);
7340 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m32);
7341 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m64);
7342 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7343 }
7344 case IEMMODE_64BIT:
7345 switch (pVCpu->iem.s.enmEffAddrMode)
7346 {
7347 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
7348 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m32);
7349 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m64);
7350 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7351 }
7352 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7353 }
7354 }
7355
7356 /*
7357 * Annoying double switch here.
7358 * Using ugly macro for implementing the cases, sharing it with scasb.
7359 */
7360 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
7361 switch (pVCpu->iem.s.enmEffOpSize)
7362 {
7363 case IEMMODE_16BIT:
7364 switch (pVCpu->iem.s.enmEffAddrMode)
7365 {
7366 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
7367 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
7368 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
7369 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7370 }
7371 break;
7372
7373 case IEMMODE_32BIT:
7374 switch (pVCpu->iem.s.enmEffAddrMode)
7375 {
7376 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
7377 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
7378 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
7379 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7380 }
7381 break;
7382
7383 case IEMMODE_64BIT:
7384 switch (pVCpu->iem.s.enmEffAddrMode)
7385 {
7386 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7387 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
7388 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
7389 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7390 }
7391 break;
7392 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7393 }
7394}
7395
7396#undef IEM_SCAS_CASE
7397
7398/**
7399 * Common 'mov r8, imm8' helper.
7400 */
7401FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
7402{
7403 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7404 IEM_MC_BEGIN(0, 1);
7405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7406 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
7407 IEM_MC_STORE_GREG_U8(iFixedReg, u8Value);
7408 IEM_MC_ADVANCE_RIP_AND_FINISH();
7409 IEM_MC_END();
7410}
7411
7412
7413/**
7414 * @opcode 0xb0
7415 */
7416FNIEMOP_DEF(iemOp_mov_AL_Ib)
7417{
7418 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
7419 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7420}
7421
7422
7423/**
7424 * @opcode 0xb1
7425 */
7426FNIEMOP_DEF(iemOp_CL_Ib)
7427{
7428 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
7429 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7430}
7431
7432
7433/**
7434 * @opcode 0xb2
7435 */
7436FNIEMOP_DEF(iemOp_DL_Ib)
7437{
7438 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
7439 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7440}
7441
7442
7443/**
7444 * @opcode 0xb3
7445 */
7446FNIEMOP_DEF(iemOp_BL_Ib)
7447{
7448 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
7449 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7450}
7451
7452
7453/**
7454 * @opcode 0xb4
7455 */
7456FNIEMOP_DEF(iemOp_mov_AH_Ib)
7457{
7458 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
7459 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7460}
7461
7462
7463/**
7464 * @opcode 0xb5
7465 */
7466FNIEMOP_DEF(iemOp_CH_Ib)
7467{
7468 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
7469 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7470}
7471
7472
7473/**
7474 * @opcode 0xb6
7475 */
7476FNIEMOP_DEF(iemOp_DH_Ib)
7477{
7478 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
7479 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7480}
7481
7482
7483/**
7484 * @opcode 0xb7
7485 */
7486FNIEMOP_DEF(iemOp_BH_Ib)
7487{
7488 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
7489 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7490}
7491
7492
7493/**
7494 * Common 'mov regX,immX' helper.
7495 */
7496FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
7497{
7498 switch (pVCpu->iem.s.enmEffOpSize)
7499 {
7500 case IEMMODE_16BIT:
7501 {
7502 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7503 IEM_MC_BEGIN(0, 1);
7504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7505 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
7506 IEM_MC_STORE_GREG_U16(iFixedReg, u16Value);
7507 IEM_MC_ADVANCE_RIP_AND_FINISH();
7508 IEM_MC_END();
7509 break;
7510 }
7511
7512 case IEMMODE_32BIT:
7513 {
7514 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
7515 IEM_MC_BEGIN(0, 1);
7516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7517 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
7518 IEM_MC_STORE_GREG_U32(iFixedReg, u32Value);
7519 IEM_MC_ADVANCE_RIP_AND_FINISH();
7520 IEM_MC_END();
7521 break;
7522 }
7523 case IEMMODE_64BIT:
7524 {
7525 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
7526 IEM_MC_BEGIN(0, 1);
7527 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7528 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
7529 IEM_MC_STORE_GREG_U64(iFixedReg, u64Value);
7530 IEM_MC_ADVANCE_RIP_AND_FINISH();
7531 IEM_MC_END();
7532 break;
7533 }
7534 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7535 }
7536}
7537
7538
7539/**
7540 * @opcode 0xb8
7541 */
7542FNIEMOP_DEF(iemOp_eAX_Iv)
7543{
7544 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
7545 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7546}
7547
7548
7549/**
7550 * @opcode 0xb9
7551 */
7552FNIEMOP_DEF(iemOp_eCX_Iv)
7553{
7554 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
7555 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7556}
7557
7558
7559/**
7560 * @opcode 0xba
7561 */
7562FNIEMOP_DEF(iemOp_eDX_Iv)
7563{
7564 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
7565 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7566}
7567
7568
7569/**
7570 * @opcode 0xbb
7571 */
7572FNIEMOP_DEF(iemOp_eBX_Iv)
7573{
7574 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
7575 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7576}
7577
7578
7579/**
7580 * @opcode 0xbc
7581 */
7582FNIEMOP_DEF(iemOp_eSP_Iv)
7583{
7584 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
7585 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7586}
7587
7588
7589/**
7590 * @opcode 0xbd
7591 */
7592FNIEMOP_DEF(iemOp_eBP_Iv)
7593{
7594 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
7595 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7596}
7597
7598
7599/**
7600 * @opcode 0xbe
7601 */
7602FNIEMOP_DEF(iemOp_eSI_Iv)
7603{
7604 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
7605 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7606}
7607
7608
7609/**
7610 * @opcode 0xbf
7611 */
7612FNIEMOP_DEF(iemOp_eDI_Iv)
7613{
7614 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
7615 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7616}
7617
7618
7619/**
7620 * @opcode 0xc0
7621 */
7622FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
7623{
7624 IEMOP_HLP_MIN_186();
7625 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7626 PCIEMOPSHIFTSIZES pImpl;
7627 switch (IEM_GET_MODRM_REG_8(bRm))
7628 {
7629 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
7630 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
7631 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
7632 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
7633 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
7634 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
7635 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
7636 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
7637 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
7638 }
7639 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7640
7641 if (IEM_IS_MODRM_REG_MODE(bRm))
7642 {
7643 /* register */
7644 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7645 IEM_MC_BEGIN(3, 0);
7646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7647 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7648 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7649 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7650 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7651 IEM_MC_REF_EFLAGS(pEFlags);
7652 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7653 IEM_MC_ADVANCE_RIP_AND_FINISH();
7654 IEM_MC_END();
7655 }
7656 else
7657 {
7658 /* memory */
7659 IEM_MC_BEGIN(3, 3);
7660 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7661 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7662 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7664 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
7665
7666 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7667 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7668 IEM_MC_ASSIGN(cShiftArg, cShift);
7669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7670 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7671 IEM_MC_FETCH_EFLAGS(EFlags);
7672 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7673
7674 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
7675 IEM_MC_COMMIT_EFLAGS(EFlags);
7676 IEM_MC_ADVANCE_RIP_AND_FINISH();
7677 IEM_MC_END();
7678 }
7679}
7680
7681
7682/**
7683 * @opcode 0xc1
7684 */
7685FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
7686{
7687 IEMOP_HLP_MIN_186();
7688 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7689 PCIEMOPSHIFTSIZES pImpl;
7690 switch (IEM_GET_MODRM_REG_8(bRm))
7691 {
7692 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
7693 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
7694 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
7695 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
7696 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
7697 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
7698 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
7699 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
7700 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
7701 }
7702 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7703
7704 if (IEM_IS_MODRM_REG_MODE(bRm))
7705 {
7706 /* register */
7707 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7708 switch (pVCpu->iem.s.enmEffOpSize)
7709 {
7710 case IEMMODE_16BIT:
7711 IEM_MC_BEGIN(3, 0);
7712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7713 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7714 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7715 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7716 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7717 IEM_MC_REF_EFLAGS(pEFlags);
7718 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7719 IEM_MC_ADVANCE_RIP_AND_FINISH();
7720 IEM_MC_END();
7721 break;
7722
7723 case IEMMODE_32BIT:
7724 IEM_MC_BEGIN(3, 0);
7725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7726 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7727 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7728 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7729 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7730 IEM_MC_REF_EFLAGS(pEFlags);
7731 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7732 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7733 IEM_MC_ADVANCE_RIP_AND_FINISH();
7734 IEM_MC_END();
7735 break;
7736
7737 case IEMMODE_64BIT:
7738 IEM_MC_BEGIN(3, 0);
7739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7740 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7741 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7742 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7743 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7744 IEM_MC_REF_EFLAGS(pEFlags);
7745 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7746 IEM_MC_ADVANCE_RIP_AND_FINISH();
7747 IEM_MC_END();
7748 break;
7749
7750 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7751 }
7752 }
7753 else
7754 {
7755 /* memory */
7756 switch (pVCpu->iem.s.enmEffOpSize)
7757 {
7758 case IEMMODE_16BIT:
7759 IEM_MC_BEGIN(3, 2);
7760 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7761 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7762 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7764
7765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7766 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7767 IEM_MC_ASSIGN(cShiftArg, cShift);
7768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7769 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7770 IEM_MC_FETCH_EFLAGS(EFlags);
7771 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7772
7773 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7774 IEM_MC_COMMIT_EFLAGS(EFlags);
7775 IEM_MC_ADVANCE_RIP_AND_FINISH();
7776 IEM_MC_END();
7777 break;
7778
7779 case IEMMODE_32BIT:
7780 IEM_MC_BEGIN(3, 2);
7781 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7782 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7783 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7784 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7785
7786 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7787 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7788 IEM_MC_ASSIGN(cShiftArg, cShift);
7789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7790 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7791 IEM_MC_FETCH_EFLAGS(EFlags);
7792 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7793
7794 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7795 IEM_MC_COMMIT_EFLAGS(EFlags);
7796 IEM_MC_ADVANCE_RIP_AND_FINISH();
7797 IEM_MC_END();
7798 break;
7799
7800 case IEMMODE_64BIT:
7801 IEM_MC_BEGIN(3, 2);
7802 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7803 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7804 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7805 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7806
7807 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7808 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7809 IEM_MC_ASSIGN(cShiftArg, cShift);
7810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7811 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7812 IEM_MC_FETCH_EFLAGS(EFlags);
7813 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7814
7815 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7816 IEM_MC_COMMIT_EFLAGS(EFlags);
7817 IEM_MC_ADVANCE_RIP_AND_FINISH();
7818 IEM_MC_END();
7819 break;
7820
7821 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7822 }
7823 }
7824}
7825
7826
7827/**
7828 * @opcode 0xc2
7829 */
7830FNIEMOP_DEF(iemOp_retn_Iw)
7831{
7832 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
7833 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7834 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7836 switch (pVCpu->iem.s.enmEffOpSize)
7837 {
7838 case IEMMODE_16BIT:
7839 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_16, u16Imm);
7840 case IEMMODE_32BIT:
7841 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_32, u16Imm);
7842 case IEMMODE_64BIT:
7843 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_64, u16Imm);
7844 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7845 }
7846}
7847
7848
7849/**
7850 * @opcode 0xc3
7851 */
7852FNIEMOP_DEF(iemOp_retn)
7853{
7854 IEMOP_MNEMONIC(retn, "retn");
7855 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7857 switch (pVCpu->iem.s.enmEffOpSize)
7858 {
7859 case IEMMODE_16BIT:
7860 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_16);
7861 case IEMMODE_32BIT:
7862 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_32);
7863 case IEMMODE_64BIT:
7864 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_64);
7865 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7866 }
7867}
7868
7869
7870/**
7871 * @opcode 0xc4
7872 */
7873FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
7874{
7875 /* The LDS instruction is invalid 64-bit mode. In legacy and
7876 compatability mode it is invalid with MOD=3.
7877 The use as a VEX prefix is made possible by assigning the inverted
7878 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
7879 outside of 64-bit mode. VEX is not available in real or v86 mode. */
7880 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7881 if ( IEM_IS_64BIT_CODE(pVCpu)
7882 || IEM_IS_MODRM_REG_MODE(bRm) )
7883 {
7884 IEMOP_MNEMONIC(vex3_prefix, "vex3");
7885 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
7886 {
7887 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
7888 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
7889 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
7890 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
7891 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
7892 if ((bVex2 & 0x80 /* VEX.W */) && IEM_IS_64BIT_CODE(pVCpu))
7893 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
7894 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
7895 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
7896 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
7897 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
7898 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
7899 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
7900
7901 switch (bRm & 0x1f)
7902 {
7903 case 1: /* 0x0f lead opcode byte. */
7904#ifdef IEM_WITH_VEX
7905 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7906#else
7907 IEMOP_BITCH_ABOUT_STUB();
7908 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7909#endif
7910
7911 case 2: /* 0x0f 0x38 lead opcode bytes. */
7912#ifdef IEM_WITH_VEX
7913 return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7914#else
7915 IEMOP_BITCH_ABOUT_STUB();
7916 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7917#endif
7918
7919 case 3: /* 0x0f 0x3a lead opcode bytes. */
7920#ifdef IEM_WITH_VEX
7921 return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7922#else
7923 IEMOP_BITCH_ABOUT_STUB();
7924 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7925#endif
7926
7927 default:
7928 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
7929 IEMOP_RAISE_INVALID_OPCODE_RET();
7930 }
7931 }
7932 Log(("VEX3: VEX support disabled!\n"));
7933 IEMOP_RAISE_INVALID_OPCODE_RET();
7934 }
7935
7936 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
7937 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
7938}
7939
7940
7941/**
7942 * @opcode 0xc5
7943 */
7944FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
7945{
7946 /* The LES instruction is invalid 64-bit mode. In legacy and
7947 compatability mode it is invalid with MOD=3.
7948 The use as a VEX prefix is made possible by assigning the inverted
7949 REX.R to the top MOD bit, and the top bit in the inverted register
7950 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
7951 to accessing registers 0..7 in this VEX form. */
7952 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7953 if ( IEM_IS_64BIT_CODE(pVCpu)
7954 || IEM_IS_MODRM_REG_MODE(bRm))
7955 {
7956 IEMOP_MNEMONIC(vex2_prefix, "vex2");
7957 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
7958 {
7959 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
7960 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
7961 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
7962 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
7963 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
7964 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
7965 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
7966 pVCpu->iem.s.idxPrefix = bRm & 0x3;
7967
7968#ifdef IEM_WITH_VEX
7969 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7970#else
7971 IEMOP_BITCH_ABOUT_STUB();
7972 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7973#endif
7974 }
7975
7976 /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
7977 Log(("VEX2: VEX support disabled!\n"));
7978 IEMOP_RAISE_INVALID_OPCODE_RET();
7979 }
7980
7981 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
7982 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
7983}
7984
7985
/**
 * @opcode      0xc6
 *
 * Group 11 byte form: only /0 (mov Eb,Ib) is defined; /1../7 raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to be fetched */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8019
8020
/**
 * @opcode      0xc7
 *
 * Group 11 word/dword/qword form: only /0 (mov Ev,Iz) is defined; /1../7
 * raise \#UD.  In 64-bit mode the immediate is a sign-extended dword.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* 64-bit mov Ev,Iz sign-extends a 32-bit immediate. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = immediate word still to be fetched */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = immediate dword still to be fetched */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = the sign-extended dword immediate still to be fetched */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8108
8109
8110
8111
/**
 * @opcode      0xc8
 *
 * ENTER Iw,Ib - create a stack frame; deferred to the C implementation.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();                 /* introduced with the 80186 */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(0, iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
8125
8126
/**
 * @opcode      0xc9
 *
 * LEAVE - release the stack frame set up by ENTER; deferred to the C
 * implementation.
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();                 /* introduced with the 80186 */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(0, iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
8138
8139
/**
 * @opcode      0xca
 *
 * RETF Iw - far return, popping an extra Iw bytes off the stack.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE,
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
8151
8152
/**
 * @opcode      0xcb
 *
 * RETF - far return without extra stack adjustment (same C worker as 0xca
 * with a zero pop count).
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE,
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
8163
8164
/**
 * @opcode      0xcc
 *
 * INT3 - breakpoint; dispatches \#BP via the common software interrupt
 * C worker.
 */
FNIEMOP_DEF(iemOp_int3)
{
    IEMOP_MNEMONIC(int3, "int3");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
}
8176
8177
/**
 * @opcode      0xcd
 *
 * INT Ib - software interrupt with an explicit vector byte.
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    IEMOP_MNEMONIC(int_Ib, "int Ib");
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, u8Int, IEMINT_INTN);
}
8190
8191
/**
 * @opcode      0xce
 *
 * INTO - raise \#OF if OF is set; invalid in 64-bit mode.  The conditional
 * nature is flagged so the recompiler treats it as a conditional branch.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_CONDITIONAL
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
}
8203
8204
/**
 * @opcode      0xcf
 *
 * IRET - interrupt return; deferred to the C implementation.  IRQs are
 * checked before execution (IEM_CIMPL_F_CHECK_IRQ_BEFORE).
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
                                iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
8216
8217
/**
 * @opcode      0xd0
 *
 * Group 2 rotate/shift of Eb by an implicit count of one.  The ModR/M reg
 * field selects the operation; /6 is undefined and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is undefined in group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,       pu8Dst,            0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,  1);
        IEM_MC_ARG(uint32_t *,      pEFlags,           2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *,       pu8Dst,            0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,  1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8275
8276
8277
8278/**
8279 * @opcode 0xd1
8280 */
8281FNIEMOP_DEF(iemOp_Grp2_Ev_1)
8282{
8283 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8284 PCIEMOPSHIFTSIZES pImpl;
8285 switch (IEM_GET_MODRM_REG_8(bRm))
8286 {
8287 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
8288 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
8289 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
8290 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
8291 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
8292 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
8293 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
8294 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8295 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
8296 }
8297 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8298
8299 if (IEM_IS_MODRM_REG_MODE(bRm))
8300 {
8301 /* register */
8302 switch (pVCpu->iem.s.enmEffOpSize)
8303 {
8304 case IEMMODE_16BIT:
8305 IEM_MC_BEGIN(3, 0);
8306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8307 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8308 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8309 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8310 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8311 IEM_MC_REF_EFLAGS(pEFlags);
8312 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8313 IEM_MC_ADVANCE_RIP_AND_FINISH();
8314 IEM_MC_END();
8315 break;
8316
8317 case IEMMODE_32BIT:
8318 IEM_MC_BEGIN(3, 0);
8319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8320 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8321 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8322 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8323 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8324 IEM_MC_REF_EFLAGS(pEFlags);
8325 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8326 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8327 IEM_MC_ADVANCE_RIP_AND_FINISH();
8328 IEM_MC_END();
8329 break;
8330
8331 case IEMMODE_64BIT:
8332 IEM_MC_BEGIN(3, 0);
8333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8334 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8335 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8336 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8337 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8338 IEM_MC_REF_EFLAGS(pEFlags);
8339 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8340 IEM_MC_ADVANCE_RIP_AND_FINISH();
8341 IEM_MC_END();
8342 break;
8343
8344 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8345 }
8346 }
8347 else
8348 {
8349 /* memory */
8350 switch (pVCpu->iem.s.enmEffOpSize)
8351 {
8352 case IEMMODE_16BIT:
8353 IEM_MC_BEGIN(3, 2);
8354 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8355 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8356 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8357 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8358
8359 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8361 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8362 IEM_MC_FETCH_EFLAGS(EFlags);
8363 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8364
8365 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8366 IEM_MC_COMMIT_EFLAGS(EFlags);
8367 IEM_MC_ADVANCE_RIP_AND_FINISH();
8368 IEM_MC_END();
8369 break;
8370
8371 case IEMMODE_32BIT:
8372 IEM_MC_BEGIN(3, 2);
8373 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8374 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8375 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8376 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8377
8378 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8380 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8381 IEM_MC_FETCH_EFLAGS(EFlags);
8382 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8383
8384 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8385 IEM_MC_COMMIT_EFLAGS(EFlags);
8386 IEM_MC_ADVANCE_RIP_AND_FINISH();
8387 IEM_MC_END();
8388 break;
8389
8390 case IEMMODE_64BIT:
8391 IEM_MC_BEGIN(3, 2);
8392 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8393 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8394 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8395 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8396
8397 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8399 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8400 IEM_MC_FETCH_EFLAGS(EFlags);
8401 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8402
8403 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8404 IEM_MC_COMMIT_EFLAGS(EFlags);
8405 IEM_MC_ADVANCE_RIP_AND_FINISH();
8406 IEM_MC_END();
8407 break;
8408
8409 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8410 }
8411 }
8412}
8413
8414
/**
 * @opcode      0xd2
 *
 * Group 2 rotate/shift of Eb by the count in CL.  The ModR/M reg field
 * selects the operation; /6 is undefined and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is undefined in group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,   pu8Dst,    0);
        IEM_MC_ARG(uint8_t,     cShiftArg, 1);
        IEM_MC_ARG(uint32_t *,  pEFlags,   2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* the shift count comes from CL */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *,   pu8Dst,    0);
        IEM_MC_ARG(uint8_t,     cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* the shift count comes from CL */
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8474
8475
8476/**
8477 * @opcode 0xd3
8478 */
8479FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
8480{
8481 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8482 PCIEMOPSHIFTSIZES pImpl;
8483 switch (IEM_GET_MODRM_REG_8(bRm))
8484 {
8485 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
8486 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
8487 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
8488 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
8489 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
8490 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
8491 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
8492 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8493 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8494 }
8495 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8496
8497 if (IEM_IS_MODRM_REG_MODE(bRm))
8498 {
8499 /* register */
8500 switch (pVCpu->iem.s.enmEffOpSize)
8501 {
8502 case IEMMODE_16BIT:
8503 IEM_MC_BEGIN(3, 0);
8504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8505 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8506 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8507 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8508 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8509 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8510 IEM_MC_REF_EFLAGS(pEFlags);
8511 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8512 IEM_MC_ADVANCE_RIP_AND_FINISH();
8513 IEM_MC_END();
8514 break;
8515
8516 case IEMMODE_32BIT:
8517 IEM_MC_BEGIN(3, 0);
8518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8519 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8520 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8521 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8522 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8523 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8524 IEM_MC_REF_EFLAGS(pEFlags);
8525 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8526 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8527 IEM_MC_ADVANCE_RIP_AND_FINISH();
8528 IEM_MC_END();
8529 break;
8530
8531 case IEMMODE_64BIT:
8532 IEM_MC_BEGIN(3, 0);
8533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8534 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8535 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8536 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8537 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8538 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8539 IEM_MC_REF_EFLAGS(pEFlags);
8540 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8541 IEM_MC_ADVANCE_RIP_AND_FINISH();
8542 IEM_MC_END();
8543 break;
8544
8545 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8546 }
8547 }
8548 else
8549 {
8550 /* memory */
8551 switch (pVCpu->iem.s.enmEffOpSize)
8552 {
8553 case IEMMODE_16BIT:
8554 IEM_MC_BEGIN(3, 2);
8555 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8556 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8557 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8558 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8559
8560 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8562 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8563 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8564 IEM_MC_FETCH_EFLAGS(EFlags);
8565 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8566
8567 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8568 IEM_MC_COMMIT_EFLAGS(EFlags);
8569 IEM_MC_ADVANCE_RIP_AND_FINISH();
8570 IEM_MC_END();
8571 break;
8572
8573 case IEMMODE_32BIT:
8574 IEM_MC_BEGIN(3, 2);
8575 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8576 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8577 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8578 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8579
8580 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8582 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8583 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8584 IEM_MC_FETCH_EFLAGS(EFlags);
8585 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8586
8587 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8588 IEM_MC_COMMIT_EFLAGS(EFlags);
8589 IEM_MC_ADVANCE_RIP_AND_FINISH();
8590 IEM_MC_END();
8591 break;
8592
8593 case IEMMODE_64BIT:
8594 IEM_MC_BEGIN(3, 2);
8595 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8596 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8597 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8598 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8599
8600 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8602 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8603 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8604 IEM_MC_FETCH_EFLAGS(EFlags);
8605 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8606
8607 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8608 IEM_MC_COMMIT_EFLAGS(EFlags);
8609 IEM_MC_ADVANCE_RIP_AND_FINISH();
8610 IEM_MC_END();
8611 break;
8612
8613 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8614 }
8615 }
8616}
8617
/**
 * @opcode      0xd4
 *
 * AAM Ib - ASCII adjust AX after multiply.  Invalid in 64-bit mode; an
 * immediate of zero raises \#DE before deferring to the C worker.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        IEMOP_RAISE_DIVIDE_ERROR_RET(); /* AAM divides AL by the immediate */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aam, bImm);
}
8631
8632
/**
 * @opcode      0xd5
 *
 * AAD Ib - ASCII adjust AX before division.  Invalid in 64-bit mode.
 * Unlike AAM, a zero immediate is legal (AAD multiplies rather than divides).
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aad, bImm);
}
8644
8645
/**
 * @opcode      0xd6
 *
 * SALC - undocumented instruction: set AL to 0xff if CF is set, else to
 * 0x00.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8664
8665
/**
 * @opcode      0xd7
 *
 * XLAT - table lookup: AL = [rBX + AL] using the effective segment, with
 * the address width selected by the effective address mode.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX); /* zero-extended AL as index */
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX); /* zero-extended AL as index */
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX); /* zero-extended AL as index */
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8716
8717
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.  Raises a stack underflow if either register is empty.
 *
 * @param   bRm         Mod R/M byte (RM selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode); /* result goes to ST0 */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8747
8748
/**
 * Common worker for FPU instructions working on ST0 and STn, and only
 * affecting flags (FSW); no value is stored back to the stack.
 *
 * @param   bRm         Mod R/M byte (RM selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,          u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw,   u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode); /* UINT8_MAX = no destination register */
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8778
8779
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags (FSW), and popping the stack when done.
 *
 * @param   bRm         Mod R/M byte (RM selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,          u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw,   u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode); /* UINT8_MAX = no destination register */
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8809
8810
/** Opcode 0xd8 11/0.  FADD ST0,STn - dispatches to the st0/stN worker. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
8817
8818
/** Opcode 0xd8 11/1.  FMUL ST0,STn - dispatches to the st0/stN worker. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
8825
8826
/** Opcode 0xd8 11/2.  FCOM ST0,STn - compare, flags only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
8833
8834
/** Opcode 0xd8 11/3.  FCOMP ST0,STn - compare, flags only, then pop.
 * Reuses the FCOM assembly worker via the popping wrapper. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
8841
8842
/** Opcode 0xd8 11/4.  FSUB ST0,STn - dispatches to the st0/stN worker. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
8849
8850
/** Opcode 0xd8 11/5.  FSUBR ST0,STn - reversed subtract, st0/stN worker. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
8857
8858
/** Opcode 0xd8 11/6.  FDIV ST0,STn - dispatches to the st0/stN worker. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
8865
8866
/** Opcode 0xd8 11/7.  FDIVR ST0,STn - reversed divide, st0/stN worker. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
8873
8874
/**
 * Common worker for FPU instructions working on ST0 and an m32r (32-bit
 * float memory operand), and storing the result in ST0.
 *
 * @param   bRm         Mod R/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* fetch before touching FPU state */

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode); /* result goes to ST0 */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8910
8911
/** Opcode 0xd8 !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    /* Memory form: ST0 := ST0 + m32r via the common ST0/m32r worker. */
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
8918
8919
/** Opcode 0xd8 !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    /* Memory form: ST0 := ST0 * m32r via the common ST0/m32r worker. */
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
8926
8927
/** Opcode 0xd8 !11/2. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    /* Compare ST0 against an m32r operand; only FSW is updated, no value is stored. */
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* Memory-operand FSW update also records the data pointer (FPUDP). */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8959
8960
/** Opcode 0xd8 !11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    /* Same as fcom m32r, but pops ST0 afterwards (uses the _THEN_POP update macros). */
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8992
8993
/** Opcode 0xd8 !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    /* Memory form: ST0 := ST0 - m32r via the common ST0/m32r worker. */
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
9000
9001
/** Opcode 0xd8 !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    /* Reverse subtract, memory form; only the assembly helper differs from fsub. */
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
9008
9009
/** Opcode 0xd8 !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    /* Memory form: ST0 := ST0 / m32r via the common ST0/m32r worker. */
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
9016
9017
/** Opcode 0xd8 !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    /* Reverse divide, memory form; only the assembly helper differs from fdiv. */
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
9024
9025
/**
 * @opcode 0xd8
 *
 * First x87 escape byte.  Decodes the ModR/M byte and dispatches:
 * register mode (mod == 3) selects the ST0,ST(i) forms, otherwise the
 * m32real memory forms; the /reg field picks the operation.
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP): low 3 bits of the escape byte + ModR/M. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9065
9066
/** Opcode 0xd9 /0 mem32real
 * Pushes an m32real onto the FPU stack (converted to 80-bit by the helper).
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Relative register 7 is the slot the push lands in; it must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9097
9098
/** Opcode 0xd9 !11/2 mem32real
 * Stores ST0 to an m32real without popping. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* NOTE(review): still uses the old IEM_MC_MEM_MAP; the tree is being
       converted to IEM_MC_MEM_MAP_XXX (see r100833 series) - confirm before
       touching. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on the FSW the helper produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Masked invalid-operation: write the indefinite (negative QNaN) value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9132
9133
/** Opcode 0xd9 !11/3
 * Stores ST0 to an m32real and pops the stack. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* NOTE(review): still uses the old IEM_MC_MEM_MAP; pending conversion to
       IEM_MC_MEM_MAP_XXX (see r100833 series). */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        /* Unlike fst, the FSW update also pops ST0. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9167
9168
9169/** Opcode 0xd9 !11/4 */
9170FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
9171{
9172 IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
9173 IEM_MC_BEGIN(3, 0);
9174 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
9175 IEM_MC_ARG(uint8_t, iEffSeg, 1);
9176 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
9177 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9179 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9180 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9181 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9182 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
9183 IEM_MC_END();
9184}
9185
9186
9187/** Opcode 0xd9 !11/5 */
9188FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
9189{
9190 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
9191 IEM_MC_BEGIN(1, 1);
9192 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9193 IEM_MC_ARG(uint16_t, u16Fsw, 0);
9194 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9196 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9197 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9198 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9199 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, iemCImpl_fldcw, u16Fsw);
9200 IEM_MC_END();
9201}
9202
9203
/** Opcode 0xd9 !11/6
 * Stores the FPU environment (14 or 28 bytes depending on operand size). */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    /* NOTE(review): the stats mnemonic says "fstenv" while this handler
       implements fnstenv (no-wait form); possibly intentional for stats
       grouping - confirm before renaming. */
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
9220
9221
/** Opcode 0xd9 !11/7
 * Stores the FPU control word to a 2-byte memory operand. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    /* Read-only FPU state access: FCW is fetched and written straight to memory. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9238
9239
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FPU no-operation: only updates the FPU opcode/IP registers. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9255
9256
/** Opcode 0xd9 11/0 stN
 * Pushes a copy of ST(i) onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        /* Wrap the source value in a result with a clean FSW and push it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9281
9282
/** Opcode 0xd9 11/3 stN
 * Exchanges ST0 with ST(i). */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(2, 3);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        /* ST(i)'s old value (with C1 in the FSW) becomes the new ST0, and
           ST0's old value is written into ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Either register empty: underflow handling lives in a C implementation. */
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9311
9312
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * Stores ST0 into ST(i) and pops the stack. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        /* Destination is ST0 itself: no copy needed, just pop (or underflow). */
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* General case: copy ST0 into ST(i), then pop. */
        IEM_MC_BEGIN(0, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9359
9360
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        /* Result replaces ST0 (register 0 relative to TOS). */
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9388
9389
/** Opcode 0xd9 0xe0. */
FNIEMOP_DEF(iemOp_fchs)
{
    /* Unary op on ST0 via the common worker. */
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
9396
9397
/** Opcode 0xd9 0xe1. */
FNIEMOP_DEF(iemOp_fabs)
{
    /* Unary op on ST0 via the common worker. */
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
9404
9405
/** Opcode 0xd9 0xe4.
 * Tests ST0; only the FSW is updated, no value stored. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no destination register to mark in the underflow path. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9429
9430
/** Opcode 0xd9 0xe5.
 * Examines/classifies ST0 into the FSW condition bits. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* No empty check here: the register is referenced unconditionally and the
       assembly helper deals with the empty case itself. */
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9451
9452
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEM_MC_BEGIN(1, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Relative register 7 is the slot the push lands in; it must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9478
9479
/** Opcode 0xd9 0xe8. */
FNIEMOP_DEF(iemOp_fld1)
{
    /* Push the constant +1.0 via the common constant-push worker. */
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
9486
9487
/** Opcode 0xd9 0xe9. */
FNIEMOP_DEF(iemOp_fldl2t)
{
    /* Push log2(10) via the common constant-push worker. */
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
9494
9495
/** Opcode 0xd9 0xea. */
FNIEMOP_DEF(iemOp_fldl2e)
{
    /* Push log2(e) via the common constant-push worker. */
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
9502
/** Opcode 0xd9 0xeb. */
FNIEMOP_DEF(iemOp_fldpi)
{
    /* Push pi via the common constant-push worker. */
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
9509
9510
/** Opcode 0xd9 0xec. */
FNIEMOP_DEF(iemOp_fldlg2)
{
    /* Push log10(2) via the common constant-push worker. */
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
9517
/** Opcode 0xd9 0xed. */
FNIEMOP_DEF(iemOp_fldln2)
{
    /* Push ln(2) via the common constant-push worker. */
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
9524
9525
/** Opcode 0xd9 0xee. */
FNIEMOP_DEF(iemOp_fldz)
{
    /* Push +0.0 via the common constant-push worker. */
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
9532
9533
/** Opcode 0xd9 0xf0.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs).  In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    /* Unary op on ST0 via the common worker. */
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
9547
9548
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Note the operand order: ST(i) is the first (destination) operand, ST0 the second. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9579
9580
/** Opcode 0xd9 0xf1. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    /* Result goes to ST1 (hence the hard-coded register 1) and the stack is popped. */
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
9587
9588
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        /* The two-value result both replaces ST0 and pushes the second value. */
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9616
9617
/** Opcode 0xd9 0xf2. */
FNIEMOP_DEF(iemOp_fptan)
{
    /* Two-output worker: one value replaces ST0, the other is pushed. */
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
9624
9625
/** Opcode 0xd9 0xf3. */
FNIEMOP_DEF(iemOp_fpatan)
{
    /* Result goes to ST1, then the stack is popped. */
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
9632
9633
/** Opcode 0xd9 0xf4. */
FNIEMOP_DEF(iemOp_fxtract)
{
    /* Two-output worker: one value replaces ST0, the other is pushed. */
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
9640
9641
/** Opcode 0xd9 0xf5. */
FNIEMOP_DEF(iemOp_fprem1)
{
    /* ST0 := partial remainder of ST0 by ST1, result stored in ST0 (no pop). */
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
9648
9649
/** Opcode 0xd9 0xf6.
 * Decrements the FPU top-of-stack pointer without touching register contents. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    /* FSW is updated with a constant 0, clearing the condition bits. */
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9670
9671
/** Opcode 0xd9 0xf7.
 * Increments the FPU top-of-stack pointer without touching register contents. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    /* FSW is updated with a constant 0, clearing the condition bits. */
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9692
9693
/** Opcode 0xd9 0xf8. */
FNIEMOP_DEF(iemOp_fprem)
{
    /* ST0 := partial remainder of ST0 by ST1, result stored in ST0 (no pop). */
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
9700
9701
/** Opcode 0xd9 0xf9. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    /* Result goes to ST1, then the stack is popped. */
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
9708
9709
/** Opcode 0xd9 0xfa. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    /* Unary op on ST0 via the common worker. */
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
9716
9717
/** Opcode 0xd9 0xfb. */
FNIEMOP_DEF(iemOp_fsincos)
{
    /* Two-output worker: one value replaces ST0, the other is pushed. */
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
9724
9725
/** Opcode 0xd9 0xfc. */
FNIEMOP_DEF(iemOp_frndint)
{
    /* Unary op on ST0 via the common worker. */
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
9732
9733
/** Opcode 0xd9 0xfd. */
FNIEMOP_DEF(iemOp_fscale)
{
    /* ST0 scaled by ST1; result stored in ST0 (no pop). */
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
9740
9741
/** Opcode 0xd9 0xfe. */
FNIEMOP_DEF(iemOp_fsin)
{
    /* Unary op on ST0 via the common worker. */
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
9748
9749
/** Opcode 0xd9 0xff. */
FNIEMOP_DEF(iemOp_fcos)
{
    /* Unary op on ST0 via the common worker. */
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
9756
9757
/** Used by iemOp_EscF1.
 * Dispatch table for the 0xd9 register-mode encodings 0xe0..0xff;
 * indexed by (bRm - 0xe0), see the bounds assertion in iemOp_EscF1. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
9794
9795
/**
 * @opcode 0xd9
 *
 * Second x87 escape byte.  Register mode dispatches fld/fxch/fnop/fstp and,
 * for /4../7, the 0xe0-0xff table; memory mode covers fld/fst/fstp m32r and
 * the environment/control-word instructions.
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP): low 3 bits of the escape byte + ModR/M. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                /* Only 0xd0 of the /2 register range is defined (FNOP). */
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* 0xe0-0xff are handled via the dispatch table. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r,  bRm);
            case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv,    bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw,     bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv,   bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw,    bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9840
9841
/** Opcode 0xda 11/0.
 * Conditionally copies ST(i) to ST0 when CF is set. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        /* Move happens only when the condition (CF=1, i.e. "below") holds;
           FOP/FPUIP are updated either way. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9866
9867
/** Opcode 0xda 11/1. FCMOVE ST(0),ST(i): copies ST(i) into ST(0) when EFLAGS.ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN); /* ST(0) := ST(i) */
        } IEM_MC_ENDIF();
        /* FOP/FPU IP are updated whether or not the move was taken. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9892
9893
/** Opcode 0xda 11/2. FCMOVBE ST(0),ST(i): copies ST(i) into ST(0) when CF or ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN); /* ST(0) := ST(i) */
        } IEM_MC_ENDIF();
        /* FOP/FPU IP are updated whether or not the move was taken. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9918
9919
/** Opcode 0xda 11/3. FCMOVU ST(0),ST(i): copies ST(i) into ST(0) when EFLAGS.PF is set
 *  (PF signals an unordered compare result from FCOMI/FUCOMI). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN); /* ST(0) := ST(i) */
        } IEM_MC_ENDIF();
        /* FOP/FPU IP are updated whether or not the move was taken. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9944
9945
9946/**
9947 * Common worker for FPU instructions working on ST0 and ST1, only affecting
9948 * flags, and popping twice when done.
9949 *
9950 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9951 */
9952FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
9953{
9954 IEM_MC_BEGIN(3, 1);
9955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9956 IEM_MC_LOCAL(uint16_t, u16Fsw);
9957 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9958 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9959 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9960
9961 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9962 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9963
9964 IEM_MC_PREPARE_FPU_USAGE();
9965 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
9966 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
9967 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
9968 } IEM_MC_ELSE() {
9969 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
9970 } IEM_MC_ENDIF();
9971 IEM_MC_ADVANCE_RIP_AND_FINISH();
9972
9973 IEM_MC_END();
9974}
9975
9976
/** Opcode 0xda 0xe9. FUCOMPP: unordered compare of ST(0) with ST(1), then pop both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
9983
9984
9985/**
9986 * Common worker for FPU instructions working on ST0 and an m32i, and storing
9987 * the result in ST0.
9988 *
9989 * @param bRm Mod R/M byte.
9990 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9991 */
9992FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
9993{
9994 IEM_MC_BEGIN(3, 3);
9995 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9996 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9997 IEM_MC_LOCAL(int32_t, i32Val2);
9998 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9999 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10000 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
10001
10002 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10004
10005 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10006 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10007 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10008
10009 IEM_MC_PREPARE_FPU_USAGE();
10010 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10011 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
10012 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10013 } IEM_MC_ELSE() {
10014 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10015 } IEM_MC_ENDIF();
10016 IEM_MC_ADVANCE_RIP_AND_FINISH();
10017
10018 IEM_MC_END();
10019}
10020
10021
/** Opcode 0xda !11/0. FIADD m32i: ST(0) := ST(0) + (int32 memory operand). */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
10028
10029
/** Opcode 0xda !11/1. FIMUL m32i: ST(0) := ST(0) * (int32 memory operand). */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
10036
10037
/** Opcode 0xda !11/2. FICOM ST(0),m32i: compare ST(0) with an int32 memory operand,
 *  updating only FSW condition codes (no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the operand first so memory faults precede FPU state changes. */
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* Record FSW plus the data pointer (FDP) of the memory operand. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10069
10070
/** Opcode 0xda !11/3. FICOMP ST(0),m32i: like FICOM but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the operand first so memory faults precede FPU state changes. */
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* Record FSW + memory data pointer, then pop ST(0). */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10102
10103
/** Opcode 0xda !11/4. FISUB m32i: ST(0) := ST(0) - (int32 memory operand). */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
10110
10111
/** Opcode 0xda !11/5. FISUBR m32i: ST(0) := (int32 memory operand) - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
10118
10119
/** Opcode 0xda !11/6. FIDIV m32i: ST(0) := ST(0) / (int32 memory operand). */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
10126
10127
/** Opcode 0xda !11/7. FIDIVR m32i: ST(0) := (int32 memory operand) / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
10134
10135
10136/**
10137 * @opcode 0xda
10138 */
10139FNIEMOP_DEF(iemOp_EscF2)
10140{
10141 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10142 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
10143 if (IEM_IS_MODRM_REG_MODE(bRm))
10144 {
10145 switch (IEM_GET_MODRM_REG_8(bRm))
10146 {
10147 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
10148 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
10149 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
10150 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
10151 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
10152 case 5:
10153 if (bRm == 0xe9)
10154 return FNIEMOP_CALL(iemOp_fucompp);
10155 IEMOP_RAISE_INVALID_OPCODE_RET();
10156 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10157 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
10158 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10159 }
10160 }
10161 else
10162 {
10163 switch (IEM_GET_MODRM_REG_8(bRm))
10164 {
10165 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
10166 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
10167 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
10168 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
10169 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
10170 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
10171 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
10172 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
10173 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10174 }
10175 }
10176}
10177
10178
/** Opcode 0xdb !11/0. FILD m32i: convert an int32 memory operand to R80 and push it. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires the register that becomes the new ST(0), i.e. current ST(7), to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10209
10210
/** Opcode 0xdb !11/1. FISTTP m32i (SSE3): store ST(0) as int32 with truncation, then pop.
 *  NOTE(review): still uses the legacy IEM_MC_MEM_MAP API; siblings elsewhere are
 *  being converted to IEM_MC_MEM_MAP_XXX (see r100833 commit message). */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit is conditional on the FSW result (unmasked exceptions suppress the store). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): if the invalid-operation exception is masked, store the
           integer indefinite value; either way flag the underflow and pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10244
10245
/** Opcode 0xdb !11/2. FIST m32i: store ST(0) as int32 (rounded per FCW.RC), no pop.
 *  NOTE(review): still uses the legacy IEM_MC_MEM_MAP API (conversion pending). */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit is conditional on the FSW result (unmasked exceptions suppress the store). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): store integer indefinite when #IA is masked, then flag underflow. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10279
10280
/** Opcode 0xdb !11/3. FISTP m32i: store ST(0) as int32 (rounded per FCW.RC), then pop.
 *  NOTE(review): still uses the legacy IEM_MC_MEM_MAP API (conversion pending). */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit is conditional on the FSW result (unmasked exceptions suppress the store). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): store integer indefinite when #IA is masked, then flag underflow and pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10314
10315
/** Opcode 0xdb !11/5. FLD m80r: push an 80-bit real memory operand onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires the register that becomes the new ST(0), i.e. current ST(7), to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10346
10347
/** Opcode 0xdb !11/7. FSTP m80r: store ST(0) to an 80-bit real memory operand, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* 10-byte operand: uses the _EX mapping variant with an explicit 7-byte alignment mask. */
    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        /* Commit is conditional on the FSW result (unmasked exceptions suppress the store). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): store the negative QNaN (real indefinite) when #IA is masked. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10381
10382
/** Opcode 0xdb 11/0. FCMOVNB ST(0),ST(i): copies ST(i) into ST(0) when EFLAGS.CF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN); /* ST(0) := ST(i) */
        } IEM_MC_ENDIF();
        /* FOP/FPU IP are updated whether or not the move was taken. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10407
10408
/** Opcode 0xdb 11/1. FCMOVNE ST(0),ST(i): copies ST(i) into ST(0) when EFLAGS.ZF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN); /* ST(0) := ST(i) */
        } IEM_MC_ENDIF();
        /* FOP/FPU IP are updated whether or not the move was taken. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10433
10434
/** Opcode 0xdb 11/2. FCMOVNBE ST(0),ST(i): copies ST(i) into ST(0) when both CF and ZF are clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN); /* ST(0) := ST(i) */
        } IEM_MC_ENDIF();
        /* FOP/FPU IP are updated whether or not the move was taken. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10459
10460
/** Opcode 0xdb 11/3. FCMOVNU ST(0),ST(i): copies ST(i) into ST(0) when EFLAGS.PF is clear
 *  (not-unordered).  NOTE(review): the identifier spells "nnu" with a double 'n';
 *  presumably a historical typo for "nu" — renaming would touch the dispatcher too. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN); /* ST(0) := ST(i) */
        } IEM_MC_ENDIF();
        /* FOP/FPU IP are updated whether or not the move was taken. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10485
10486
/** Opcode 0xdb 0xe0. FNENI: 8087 interrupt-enable; treated as a no-op here
 *  (only device-not-available is checked, no FPU state is changed). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10497
10498
/** Opcode 0xdb 0xe1. FNDISI: 8087 interrupt-disable; treated as a no-op here
 *  (only device-not-available is checked, no FPU state is changed). */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10509
10510
/** Opcode 0xdb 0xe2. FNCLEX: clear the FSW exception flags without checking for
 *  pending unmasked exceptions first (the no-wait form). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();  /* note: no IEM_MC_MAYBE_RAISE_FPU_XCPT - FN* forms don't wait */
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10523
10524
/** Opcode 0xdb 0xe3. FNINIT: reinitialize the FPU; deferred to the C implementation
 *  with exception checking disabled (the no-wait form). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, iemCImpl_finit, false /*fCheckXcpts*/);
}
10532
10533
/** Opcode 0xdb 0xe4. FNSETPM: 80287 "set protected mode"; treated as a no-op here
 *  (only device-not-available is checked, no FPU state is changed). */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10544
10545
/** Opcode 0xdb 0xe5. FRSTPM: 80287XL "reset protected mode"; raises \#UD here,
 *  matching newer CPUs (the no-op variant is kept under \#if 0 for reference). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
10561
10562
/** Opcode 0xdb 11/5. FUCOMI ST0,ST(i): unordered compare setting EFLAGS, no pop.
 *  Deferred to the C implementation shared with FCOMI; the combined last argument
 *  carries the FPU opcode in the low bits and the pop flag (0 = no pop). */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fucomi_r80_by_r80,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
10571
10572
10573/** Opcode 0xdb 11/6. */
10574FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
10575{
10576 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
10577 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
10578 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
10579 false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
10580}
10581
10582
10583/**
10584 * @opcode 0xdb
10585 */
10586FNIEMOP_DEF(iemOp_EscF3)
10587{
10588 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10589 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
10590 if (IEM_IS_MODRM_REG_MODE(bRm))
10591 {
10592 switch (IEM_GET_MODRM_REG_8(bRm))
10593 {
10594 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
10595 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
10596 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
10597 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
10598 case 4:
10599 switch (bRm)
10600 {
10601 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
10602 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
10603 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
10604 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
10605 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
10606 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
10607 case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
10608 case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
10609 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10610 }
10611 break;
10612 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
10613 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
10614 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
10615 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10616 }
10617 }
10618 else
10619 {
10620 switch (IEM_GET_MODRM_REG_8(bRm))
10621 {
10622 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
10623 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
10624 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
10625 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
10626 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
10627 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
10628 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10629 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
10630 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10631 }
10632 }
10633}
10634
10635
10636/**
10637 * Common worker for FPU instructions working on STn and ST0, and storing the
10638 * result in STn unless IE, DE or ZE was raised.
10639 *
10640 * @param bRm Mod R/M byte.
10641 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10642 */
10643FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
10644{
10645 IEM_MC_BEGIN(3, 1);
10646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10647 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10648 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10649 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10650 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10651
10652 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10653 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10654
10655 IEM_MC_PREPARE_FPU_USAGE();
10656 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
10657 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
10658 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10659 } IEM_MC_ELSE() {
10660 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10661 } IEM_MC_ENDIF();
10662 IEM_MC_ADVANCE_RIP_AND_FINISH();
10663
10664 IEM_MC_END();
10665}
10666
10667
/** Opcode 0xdc 11/0. FADD ST(i),ST(0): ST(i) := ST(i) + ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
10674
10675
/** Opcode 0xdc 11/1. FMUL ST(i),ST(0): ST(i) := ST(i) * ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
10682
10683
/** Opcode 0xdc 11/4. FSUBR ST(i),ST(0): subtraction with the worker's operand
 *  order (STn, ST0) handed to the reverse-subtract assembly implementation. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
10690
10691
/** Opcode 0xdc 11/5. FSUB ST(i),ST(0): subtraction with the worker's operand
 *  order (STn, ST0) handed to the subtract assembly implementation. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
10698
10699
/** Opcode 0xdc 11/6. FDIVR ST(i),ST(0): division with the worker's operand
 *  order (STn, ST0) handed to the reverse-divide assembly implementation. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
10706
10707
/** Opcode 0xdc 11/7. FDIV ST(i),ST(0): division with the worker's operand
 *  order (STn, ST0) handed to the divide assembly implementation. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
10714
10715
10716/**
10717 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
10718 * memory operand, and storing the result in ST0.
10719 *
10720 * @param bRm Mod R/M byte.
10721 * @param pfnImpl Pointer to the instruction implementation (assembly).
10722 */
10723FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
10724{
10725 IEM_MC_BEGIN(3, 3);
10726 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10727 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10728 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
10729 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10730 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
10731 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
10732
10733 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10735 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10736 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10737
10738 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10739 IEM_MC_PREPARE_FPU_USAGE();
10740 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
10741 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
10742 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10743 } IEM_MC_ELSE() {
10744 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10745 } IEM_MC_ENDIF();
10746 IEM_MC_ADVANCE_RIP_AND_FINISH();
10747
10748 IEM_MC_END();
10749}
10750
10751
/** Opcode 0xdc !11/0. FADD m64r: ST(0) := ST(0) + (double memory operand). */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
10758
10759
/** Opcode 0xdc !11/1: FMUL m64fp — multiplies ST0 by a 64-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
10766
10767
/** Opcode 0xdc !11/2: FCOM m64fp — compares ST0 with a 64-bit memory operand,
 *  updating only FSW (no stack pop, no result store). */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: record underflow; UINT8_MAX = no destination register. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10799
10800
/** Opcode 0xdc !11/3: FCOMP m64fp — same compare as FCOM m64fp, but pops the
 *  register stack afterwards (note the _THEN_POP FSW update variants). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10832
10833
/** Opcode 0xdc !11/4: FSUB m64fp — subtracts a 64-bit memory operand from ST0. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
10840
10841
/** Opcode 0xdc !11/5: FSUBR m64fp — reversed subtract with a 64-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
10848
10849
/** Opcode 0xdc !11/6: FDIV m64fp — divides ST0 by a 64-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
10856
10857
/** Opcode 0xdc !11/7: FDIVR m64fp — reversed divide with a 64-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
10864
10865
/**
 * @opcode 0xdc
 *
 * Escape-F4 dispatcher: register forms (mod == 3) operate on ST(i),ST(0);
 * memory forms operate on ST0 and a 64-bit floating point operand.  The FPU
 * opcode word (FOP) is derived from the ModR/M byte before dispatching.
 */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10904
10905
/** Opcode 0xdd !11/0: FLD m64fp — pushes a 64-bit memory operand onto the FPU
 *  stack (converted to 80-bit).  Pushing requires ST7 to be empty; otherwise a
 *  stack overflow is recorded instead.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10936
10937
/** Opcode 0xdd !11/1: FISTTP m64int — stores ST0 to memory as a 64-bit integer
 *  with truncation, then pops.  (The original comment said !11/0; the EscF5
 *  dispatcher routes reg=1 here.)  On an empty ST0 with FCW.IM masked, the
 *  integer-indefinite value (INT64_MIN) is stored instead.
 * @todo still uses the old IEM_MC_MEM_MAP interface (see r100833 commit msg). */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so a failed store can't fault halfway. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10971
10972
/** Opcode 0xdd !11/2: FST m64fp — stores ST0 to memory as a 64-bit float
 *  without popping.  (The original comment said !11/0; the EscF5 dispatcher
 *  routes reg=2 here.)  On an empty ST0 with FCW.IM masked, a negative QNaN
 *  is stored instead. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11006
11007
11008
11009
/** Opcode 0xdd !11/3: FSTP m64fp — same as FST m64fp but pops the stack
 *  afterwards (the _THEN_POP FSW/underflow variants).  (The original comment
 *  said !11/0; the EscF5 dispatcher routes reg=3 here.) */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11043
11044
/** Opcode 0xdd !11/4: FRSTOR m94/108byte — restores the full FPU state from
 *  memory; deferred to the iemCImpl_frstor C implementation.  (The original
 *  comment said !11/0; the EscF5 dispatcher routes reg=4 here.) */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
11061
11062
/** Opcode 0xdd !11/6: FNSAVE m94/108byte — saves the full FPU state to memory;
 *  deferred to the iemCImpl_fnsave C implementation.  (The original comment
 *  said !11/0; the EscF5 dispatcher routes reg=6 here.) */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
11079
/** Opcode 0xdd !11/7: FNSTSW m16 — stores the FPU status word to memory
 *  without checking for pending exceptions.  (The original comment said !11/0;
 *  the EscF5 dispatcher routes reg=7 here.) */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM.  Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
11103
11104
/** Opcode 0xdd 11/0: FFREE ST(i) — marks the given register as empty. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
       unmodified. */
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11124
11125
/** Opcode 0xdd 11/2: FST ST(i) — copies ST0 into ST(i) without popping.
 *  (The original comment said 11/1; the EscF5 dispatcher routes reg=2 here.) */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11148
11149
/** Opcode 0xdd 11/4: FUCOM ST(i) — unordered compare of ST0 with ST(i).
 *  (The original comment said 11/3; the EscF5 dispatcher routes reg=4 here.) */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
11156
11157
/** Opcode 0xdd 11/5: FUCOMP ST(i) — unordered compare of ST0 with ST(i), then pop.
 *  (The original comment said 11/4; the EscF5 dispatcher routes reg=5 here.) */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
11164
11165
/**
 * @opcode 0xdd
 *
 * Escape-F5 dispatcher: register forms cover FFREE/FXCH/FST/FSTP/FUCOM/FUCOMP;
 * memory forms cover FLD/FISTTP/FST/FSTP m64 plus FRSTOR/FNSAVE/FNSTSW.
 */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11204
11205
/** Opcode 0xde 11/0: FADDP ST(i),ST(0) — add and pop via the common pop worker. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
11212
11213
/** Opcode 0xde 11/1: FMULP ST(i),ST(0) — multiply and pop.  (Original comment
 *  said 11/0; the EscF6 dispatcher routes reg=1 here.) */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
11220
11221
/** Opcode 0xde 0xd9: FCOMPP — compares ST0 with ST1 and pops both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
11228
11229
/** Opcode 0xde 11/4: FSUBRP ST(i),ST(0) — reversed subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
11236
11237
/** Opcode 0xde 11/5: FSUBP ST(i),ST(0) — subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
11244
11245
/** Opcode 0xde 11/6: FDIVRP ST(i),ST(0) — reversed divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
11252
11253
/** Opcode 0xde 11/7: FDIVP ST(i),ST(0) — divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
11260
11261
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * Decodes the effective address, raises \#NM/\#MF as needed, fetches the
 * signed 16-bit integer operand, and — if ST0 is not empty — calls the
 * assembly worker and stores the result in ST0.  An empty ST0 is handled as
 * a stack underflow.
 *
 * @param   bRm         Mod R/M byte (memory form, i.e. mod != 3).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11297
11298
/** Opcode 0xde !11/0: FIADD m16int — adds a 16-bit integer memory operand to ST0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
11305
11306
/** Opcode 0xde !11/1: FIMUL m16int — multiplies ST0 by a 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
11313
11314
/** Opcode 0xde !11/2: FICOM m16int — compares ST0 with a 16-bit integer memory
 *  operand, updating only FSW (no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11346
11347
/** Opcode 0xde !11/3: FICOMP m16int — same compare as FICOM m16int, but pops
 *  the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11379
11380
/** Opcode 0xde !11/4: FISUB m16int — subtracts a 16-bit integer memory operand from ST0. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
11387
11388
/** Opcode 0xde !11/5: FISUBR m16int — reversed subtract with a 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
11395
11396
/** Opcode 0xde !11/6: FIDIV m16int — divides ST0 by a 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
11403
11404
/** Opcode 0xde !11/7: FIDIVR m16int — reversed divide with a 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
11411
11412
/**
 * @opcode 0xde
 *
 * Escape-F6 dispatcher: register forms are the pop-variant arithmetic ops
 * (FADDP..FDIVP) plus FCOMPP at 0xd9; memory forms operate on ST0 and a
 * signed 16-bit integer operand.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11453
11454
/** Opcode 0xdf 11/0: FFREEP ST(i).
 * Undocumented instruction, assumed to work like FFREE + FINCSTP (frees the
 * register, then increments the stack top). */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11474
11475
/** Opcode 0xdf 0xe0: FNSTSW AX — copies the FPU status word into AX without
 *  checking for pending FPU exceptions. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11490
11491
11492/** Opcode 0xdf 11/5. */
11493FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
11494{
11495 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
11496 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
11497 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
11498 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
11499}
11500
11501
/** Opcode 0xdf 11/6: FCOMIP ST(0),ST(i) — ordered compare of ST0 with ST(i),
 *  result in EFLAGS, then pop (bit 31 of the last CIMPL argument requests the
 *  pop; the low bits carry the FPU opcode word). */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11510
11511
/** Opcode 0xdf !11/0: FILD m16int — pushes a signed 16-bit integer memory
 *  operand onto the FPU stack (converted to 80-bit); requires ST7 empty. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11542
11543
/** Opcode 0xdf !11/1: FISTTP m16int — stores ST0 to memory as a 16-bit integer
 *  with truncation, then pops.  On an empty ST0 with FCW.IM masked, the
 *  integer-indefinite value (INT16_MIN) is stored instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11577
11578
/** Opcode 0xdf !11/2: FIST m16int — stores ST0 to memory as a 16-bit integer
 *  (rounded per FCW.RC), without popping. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11612
11613
/** Opcode 0xdf !11/3: FISTP m16int — same as FIST m16int but pops the stack
 *  afterwards (the _THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11647
11648
/** Opcode 0xdf !11/4.
 *
 * FBLD m80d: loads an 80-bit packed BCD value from memory, converts it to
 * the extended-real format and pushes it onto the x87 register stack.
 */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,            FpuRes);
    IEM_MC_LOCAL(RTPBCD80U,               d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,   pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U,     pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* The BCD source is fetched before the FPU stack is touched. */
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST(7) (the register below the current top) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11679
11680
/** Opcode 0xdf !11/5.
 *
 * FILD m64i: loads a signed 64-bit integer from memory, converts it to the
 * extended-real format and pushes it onto the x87 register stack.
 */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,            FpuRes);
    IEM_MC_LOCAL(int64_t,                 i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,   pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* The integer source is fetched before the FPU stack is touched. */
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST(7) (the register below the current top) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11711
11712
/** Opcode 0xdf !11/6.
 *
 * FBSTP m80d: converts ST(0) to an 80-bit packed BCD value, stores it at the
 * effective address and pops the x87 register stack.
 */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTPBCD80U,                pd80Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Explicit-size mapping of the 10-byte destination; cbAlign=7 is the
       alignment mask/requirement passed to the mapping helper. */
    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty (stack underflow): with FCW.IM masked, store the packed
           BCD indefinite value; the underflow helper handles the pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11746
11747
/** Opcode 0xdf !11/7.
 *
 * FISTP m64i: stores ST(0) as a signed 64-bit integer at the effective
 * address and pops the x87 register stack.
 */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,                 pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* The destination is mapped for writing before the FPU stack is examined. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty (stack underflow): with FCW.IM masked, write the
           integer-indefinite value; the underflow helper handles the pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11781
11782
/**
 * @opcode 0xdf
 *
 * x87 escape 0xdf dispatcher.  Records the FPU opcode (FOP) from the ModRM
 * byte, then forwards to the register-form handlers (ffreep, fnstsw ax,
 * fucomip, fcomip, plus reserved aliases) or the memory-form handlers
 * (16/64-bit integer and packed BCD loads/stores).
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* FOP = low 3 bits of the opcode (0xdf -> 7) combined with the ModRM byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0)                            /* Only DF E0 encodes FNSTSW AX; other /4 forms are invalid. */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11823
11824
/**
 * @opcode 0xe0
 *
 * LOOPNE/LOOPNZ Jb: decrements the count register selected by the effective
 * address size (CX/ECX/RCX) and takes the short relative branch when the
 * result is non-zero AND ZF is clear.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address size (not the operand size) selects the count register. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11875
11876
/**
 * @opcode 0xe1
 *
 * LOOPE/LOOPZ Jb: decrements the count register selected by the effective
 * address size (CX/ECX/RCX) and takes the short relative branch when the
 * result is non-zero AND ZF is set.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address size (not the operand size) selects the count register. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11927
11928
/**
 * @opcode 0xe2
 *
 * LOOP Jb: decrements the count register selected by the effective address
 * size (CX/ECX/RCX) and takes the short relative branch when the result is
 * non-zero.  Includes a logging-only shortcut for tight self-branching
 * "LOOP $-2" stall loops.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */

    /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
     * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs.  Shortcutting
     * the loop causes guest crashes, but when logging it's nice to skip a few million
     * lines of useless output. */
#if defined(LOG_ENABLED)
    /* Only when verbose logging is active and the branch targets the
       instruction itself: zero the count register and fall through at once. */
    if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0,0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0,0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0,0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
#endif

    /* Regular emulation: decrement and branch while non-zero. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12019
12020
/**
 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ Jb: takes the short relative branch when the count
 * register selected by the effective address size is ZERO (no decrement).
 * Note the inverted test below: the non-zero case falls through.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12068
12069
/** Opcode 0xe4.
 * IN AL,Ib: byte input from the immediate port; deferred to the C
 * implementation (I/O permission checks, VM-exit handling, etc.). */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* The 0x80 bit tells iemCImpl_in the port came from an immediate. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12079
12080
/** Opcode 0xe5.
 * IN eAX,Ib: word/dword input from the immediate port, access width chosen
 * by the effective operand size; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* The 0x80 bit tells iemCImpl_in the port came from an immediate. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12091
12092
/** Opcode 0xe6.
 * OUT Ib,AL: byte output to the immediate port; deferred to the C
 * implementation. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* The 0x80 bit tells iemCImpl_out the port came from an immediate. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12102
12103
/** Opcode 0xe7.
 * OUT Ib,eAX: word/dword output to the immediate port, access width chosen
 * by the effective operand size; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* The 0x80 bit tells iemCImpl_out the port came from an immediate. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12114
12115
/**
 * @opcode 0xe8
 *
 * CALL Jv: near relative call.  The immediate width follows the effective
 * operand size (64-bit mode uses a sign-extended 32-bit displacement); the
 * actual push+branch is deferred to the C implementation.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12146
12147
/**
 * @opcode 0xe9
 *
 * JMP Jv: near relative jump.  16-bit operand size uses a 16-bit
 * displacement; 32-bit and 64-bit modes share the 32-bit (sign-extended)
 * displacement path.
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
            IEM_MC_END();
            break;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
            IEM_MC_END();
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12181
12182
/**
 * @opcode 0xea
 *
 * JMP Ap: direct far jump with an inline selector:offset pointer.
 * Invalid in 64-bit mode (IEMOP_HLP_NO_64BIT).
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                                iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
}
12203
12204
/**
 * @opcode 0xeb
 *
 * JMP Jb: short relative jump with an 8-bit signed displacement.
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    IEM_MC_END();
}
12219
12220
/** Opcode 0xec.
 * IN AL,DX: byte input from the port in DX; deferred to the C
 * implementation. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12229
12230
/** Opcode 0xed.
 * IN eAX,DX: word/dword input from the port in DX, access width chosen by
 * the effective operand size; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12240
12241
/** Opcode 0xee.
 * OUT DX,AL: byte output to the port in DX; deferred to the C
 * implementation. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12250
12251
/** Opcode 0xef.
 * OUT DX,eAX: word/dword output to the port in DX, access width chosen by
 * the effective operand size; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12261
12262
/**
 * @opcode 0xf0
 *
 * LOCK prefix: records IEM_OP_PRF_LOCK (unless the execution mode says to
 * disregard LOCK) and continues decoding with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    /* Prefixes do not terminate decoding; dispatch the following opcode. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12275
12276
/**
 * @opcode 0xf1
 *
 * INT1/ICEBP: raises a \#DB via the common software-interrupt C
 * implementation (IEMINT_INT1 flavor).
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    /** @todo Does not generate \#UD on 286, or so they say...  Was allegedly a
     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
     * LOADALL memo.  Needs some testing. */
    IEMOP_HLP_MIN_386();
    /** @todo testcase! */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
}
12292
12293
/**
 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix: records IEM_OP_PRF_REPNZ (clearing any earlier REPZ),
 * sets the 4-entry opcode-table prefix index and continues decoding.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    /* Prefixes do not terminate decoding; dispatch the following opcode. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12311
12312
/**
 * @opcode 0xf3
 *
 * REPE/REPZ prefix: records IEM_OP_PRF_REPZ (clearing any earlier REPNZ),
 * sets the 4-entry opcode-table prefix index and continues decoding.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    /* Prefixes do not terminate decoding; dispatch the following opcode. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12330
12331
/**
 * @opcode 0xf4
 *
 * HLT: deferred to the C implementation; ends the current translation block
 * (IEM_CIMPL_F_END_TB) and may cause a VM exit.
 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_MNEMONIC(hlt, "hlt");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, iemCImpl_hlt);
}
12341
12342
/**
 * @opcode 0xf5
 *
 * CMC: complements the carry flag; no other flags are touched.
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12355
12356
/**
 * Body for 'inc/dec/not/neg Eb'.
 *
 * Handles both the register form and the memory form; the memory form picks
 * @a a_fnLockedU8 when a LOCK prefix is present, otherwise @a a_fnNormalU8.
 * Both memory paths map the byte read-write and commit EFLAGS afterwards.
 */
#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        /* register access */ \
        IEM_MC_BEGIN(2, 0); \
        IEMOP_HLP_DONE_DECODING(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst, 0); \
        IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(2, 2); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,          0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR,                GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,                bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* Identical to the plain memory path except the locked worker is called. */ \
            IEM_MC_BEGIN(2, 2); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,          0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR,                GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,                bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
12417
12418
/**
 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
 *
 * Handles the register form and the unlocked memory form for all three
 * operand sizes.  NOTE: this macro deliberately ends inside an open
 * 'else {' for the locked-memory case and MUST be followed by
 * IEMOP_BODY_UNARY_Ev_LOCKED, which supplies the locked workers and closes
 * the braces.
 */
#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(2, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,  pu16Dst, 0); \
                IEM_MC_ARG(uint32_t *,  pEFlags, 1); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(2, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,  pu32Dst, 0); \
                IEM_MC_ARG(uint32_t *,  pEFlags, 1); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero bits 63:32. */ \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(2, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,  pu64Dst, 0); \
                IEM_MC_ARG(uint32_t *,  pEFlags, 1); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,                GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,                GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,                GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
12539
/**
 * Locked-memory tail for IEMOP_BODY_UNARY_Ev.
 *
 * Must directly follow IEMOP_BODY_UNARY_Ev: it fills in the LOCK-prefixed
 * memory paths (calling the locked workers) and closes the braces the
 * preceding macro left open.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,                GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,                GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,                GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
12602
12603
12604/**
12605 * @opmaps grp3_f6
12606 * @opcode /0
12607 * @todo also /1
12608 */
12609FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
12610{
12611 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
12612 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
12613
12614 if (IEM_IS_MODRM_REG_MODE(bRm))
12615 {
12616 /* register access */
12617 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12618 IEM_MC_BEGIN(3, 0);
12619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12620 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
12621 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
12622 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12623 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12624 IEM_MC_REF_EFLAGS(pEFlags);
12625 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
12626 IEM_MC_ADVANCE_RIP_AND_FINISH();
12627 IEM_MC_END();
12628 }
12629 else
12630 {
12631 /* memory access. */
12632 IEM_MC_BEGIN(3, 3);
12633 IEM_MC_ARG(uint8_t const *, pu8Dst, 0);
12634 IEM_MC_ARG(uint8_t, u8Src, 1);
12635 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
12636 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12637 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
12638
12639 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12640 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12641 IEM_MC_ASSIGN(u8Src, u8Imm);
12642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12643 IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
12644 IEM_MC_FETCH_EFLAGS(EFlags);
12645 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
12646
12647 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo);
12648 IEM_MC_COMMIT_EFLAGS(EFlags);
12649 IEM_MC_ADVANCE_RIP_AND_FINISH();
12650 IEM_MC_END();
12651 }
12652}
12653
12654
/**
 * Opcode 0xf6 /4, /5, /6 and /7.
 *
 * Common worker for the byte-sized MUL/IMUL/DIV/IDIV forms.  AX serves as the
 * implicit source/destination; the assembly helper (pfnU8) returns 0 on
 * success and non-zero when a \#DE (divide error) must be raised.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc == 0: success; otherwise raise the divide error. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Plain fetch suffices - the memory operand is source only. */
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
12705
12706
/**
 * Opcode 0xf7 /4, /5, /6 and /7.
 *
 * Common worker for the word/dword/qword MUL/IMUL/DIV/IDIV forms.  The
 * xDX:xAX register pair is the implicit source/destination; the assembly
 * helper returns 0 on success and non-zero when a \#DE must be raised.
 * SF/ZF/AF/PF are architecturally undefined for these instructions.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                /* rc == 0: success; otherwise raise the divide error. */
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit GPR writes zero the high dword in 64-bit mode;
                       done explicitly here since the helper wrote via pointer. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* The memory operand is source only, so a plain fetch suffices. */
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* Zero the high dwords of RAX/RDX as for any 32-bit GPR write. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12889
12890
12891/**
12892 * @opmaps grp3_f6
12893 * @opcode /2
12894 */
12895FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
12896{
12897 IEMOP_MNEMONIC(not_Eb, "not Eb");
12898 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
12899}
12900
12901
12902/**
12903 * @opmaps grp3_f6
12904 * @opcode /3
12905 */
12906FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
12907{
12908 IEMOP_MNEMONIC(net_Eb, "neg Eb");
12909 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
12910}
12911
12912
12913/**
12914 * @opcode 0xf6
12915 */
12916FNIEMOP_DEF(iemOp_Grp3_Eb)
12917{
12918 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12919 switch (IEM_GET_MODRM_REG_8(bRm))
12920 {
12921 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
12922 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
12923 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
12924 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
12925 case 4:
12926 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
12927 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12928 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
12929 case 5:
12930 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
12931 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12932 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
12933 case 6:
12934 IEMOP_MNEMONIC(div_Eb, "div Eb");
12935 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12936 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
12937 case 7:
12938 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
12939 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12940 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
12941 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12942 }
12943}
12944
12945
12946/** Opcode 0xf7 /0. */
12947FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
12948{
12949 IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
12950 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
12951
12952 if (IEM_IS_MODRM_REG_MODE(bRm))
12953 {
12954 /* register access */
12955 switch (pVCpu->iem.s.enmEffOpSize)
12956 {
12957 case IEMMODE_16BIT:
12958 {
12959 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12960 IEM_MC_BEGIN(3, 0);
12961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12962 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12963 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
12964 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12965 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12966 IEM_MC_REF_EFLAGS(pEFlags);
12967 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
12968 IEM_MC_ADVANCE_RIP_AND_FINISH();
12969 IEM_MC_END();
12970 break;
12971 }
12972
12973 case IEMMODE_32BIT:
12974 {
12975 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12976 IEM_MC_BEGIN(3, 0);
12977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12978 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12979 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
12980 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12981 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12982 IEM_MC_REF_EFLAGS(pEFlags);
12983 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
12984 /* No clearing the high dword here - test doesn't write back the result. */
12985 IEM_MC_ADVANCE_RIP_AND_FINISH();
12986 IEM_MC_END();
12987 break;
12988 }
12989
12990 case IEMMODE_64BIT:
12991 {
12992 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
12993 IEM_MC_BEGIN(3, 0);
12994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12995 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12996 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
12997 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12998 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12999 IEM_MC_REF_EFLAGS(pEFlags);
13000 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
13001 IEM_MC_ADVANCE_RIP_AND_FINISH();
13002 IEM_MC_END();
13003 break;
13004 }
13005
13006 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13007 }
13008 }
13009 else
13010 {
13011 /* memory access. */
13012 switch (pVCpu->iem.s.enmEffOpSize)
13013 {
13014 case IEMMODE_16BIT:
13015 {
13016 IEM_MC_BEGIN(3, 2);
13017 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13018 IEM_MC_ARG(uint16_t, u16Src, 1);
13019 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
13020 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13021
13022 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
13023 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13024 IEM_MC_ASSIGN(u16Src, u16Imm);
13025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13026 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13027 IEM_MC_FETCH_EFLAGS(EFlags);
13028 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
13029
13030 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
13031 IEM_MC_COMMIT_EFLAGS(EFlags);
13032 IEM_MC_ADVANCE_RIP_AND_FINISH();
13033 IEM_MC_END();
13034 break;
13035 }
13036
13037 case IEMMODE_32BIT:
13038 {
13039 IEM_MC_BEGIN(3, 2);
13040 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13041 IEM_MC_ARG(uint32_t, u32Src, 1);
13042 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
13043 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13044
13045 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
13046 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13047 IEM_MC_ASSIGN(u32Src, u32Imm);
13048 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13049 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13050 IEM_MC_FETCH_EFLAGS(EFlags);
13051 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
13052
13053 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
13054 IEM_MC_COMMIT_EFLAGS(EFlags);
13055 IEM_MC_ADVANCE_RIP_AND_FINISH();
13056 IEM_MC_END();
13057 break;
13058 }
13059
13060 case IEMMODE_64BIT:
13061 {
13062 IEM_MC_BEGIN(3, 2);
13063 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13064 IEM_MC_ARG(uint64_t, u64Src, 1);
13065 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
13066 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13067
13068 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
13069 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
13070 IEM_MC_ASSIGN(u64Src, u64Imm);
13071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13072 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13073 IEM_MC_FETCH_EFLAGS(EFlags);
13074 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
13075
13076 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
13077 IEM_MC_COMMIT_EFLAGS(EFlags);
13078 IEM_MC_ADVANCE_RIP_AND_FINISH();
13079 IEM_MC_END();
13080 break;
13081 }
13082
13083 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13084 }
13085 }
13086}
13087
13088
/** Opcode 0xf7 /2.
 * NOT Ev: one's complement.  The two shared body macros emit the normal and
 * the LOCK-prefixed code paths respectively. */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_not_u16, iemAImpl_not_u32, iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
13096
13097
/** Opcode 0xf7 /3.
 * NEG Ev: two's complement negation.  The two shared body macros emit the
 * normal and the LOCK-prefixed code paths respectively. */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_neg_u16, iemAImpl_neg_u32, iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
13105
13106
13107/**
13108 * @opcode 0xf7
13109 */
13110FNIEMOP_DEF(iemOp_Grp3_Ev)
13111{
13112 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13113 switch (IEM_GET_MODRM_REG_8(bRm))
13114 {
13115 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
13116 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
13117 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
13118 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
13119 case 4:
13120 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
13121 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
13122 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
13123 case 5:
13124 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
13125 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
13126 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
13127 case 6:
13128 IEMOP_MNEMONIC(div_Ev, "div Ev");
13129 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
13130 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
13131 case 7:
13132 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
13133 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
13134 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
13135 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13136 }
13137}
13138
13139
13140/**
13141 * @opcode 0xf8
13142 */
13143FNIEMOP_DEF(iemOp_clc)
13144{
13145 IEMOP_MNEMONIC(clc, "clc");
13146 IEM_MC_BEGIN(0, 0);
13147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13148 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
13149 IEM_MC_ADVANCE_RIP_AND_FINISH();
13150 IEM_MC_END();
13151}
13152
13153
13154/**
13155 * @opcode 0xf9
13156 */
13157FNIEMOP_DEF(iemOp_stc)
13158{
13159 IEMOP_MNEMONIC(stc, "stc");
13160 IEM_MC_BEGIN(0, 0);
13161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13162 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
13163 IEM_MC_ADVANCE_RIP_AND_FINISH();
13164 IEM_MC_END();
13165}
13166
13167
13168/**
13169 * @opcode 0xfa
13170 */
13171FNIEMOP_DEF(iemOp_cli)
13172{
13173 IEMOP_MNEMONIC(cli, "cli");
13174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13175 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, iemCImpl_cli);
13176}
13177
13178
/**
 * @opcode 0xfb
 *
 * STI: deferred to iemCImpl_sti.  Uses IEM_CIMPL_F_CHECK_IRQ_AFTER where CLI
 * uses _BEFORE - presumably to honor the one-instruction STI interrupt
 * shadow; confirm against iemCImpl_sti.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER | IEM_CIMPL_F_VMEXIT, iemCImpl_sti);
}
13185
13186
13187/**
13188 * @opcode 0xfc
13189 */
13190FNIEMOP_DEF(iemOp_cld)
13191{
13192 IEMOP_MNEMONIC(cld, "cld");
13193 IEM_MC_BEGIN(0, 0);
13194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13195 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
13196 IEM_MC_ADVANCE_RIP_AND_FINISH();
13197 IEM_MC_END();
13198}
13199
13200
13201/**
13202 * @opcode 0xfd
13203 */
13204FNIEMOP_DEF(iemOp_std)
13205{
13206 IEMOP_MNEMONIC(std, "std");
13207 IEM_MC_BEGIN(0, 0);
13208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13209 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
13210 IEM_MC_ADVANCE_RIP_AND_FINISH();
13211 IEM_MC_END();
13212}
13213
13214
13215/**
13216 * @opmaps grp4
13217 * @opcode /0
13218 */
13219FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
13220{
13221 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
13222 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
13223}
13224
13225
13226/**
13227 * @opmaps grp4
13228 * @opcode /1
13229 */
13230FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
13231{
13232 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
13233 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
13234}
13235
13236
13237/**
13238 * @opcode 0xfe
13239 */
13240FNIEMOP_DEF(iemOp_Grp4)
13241{
13242 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13243 switch (IEM_GET_MODRM_REG_8(bRm))
13244 {
13245 case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
13246 case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
13247 default:
13248 /** @todo is the eff-addr decoded? */
13249 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
13250 IEMOP_RAISE_INVALID_OPCODE_RET();
13251 }
13252}
13253
/** Opcode 0xff /0.
 * INC Ev: shared unary body macros emit the normal and LOCK-prefixed paths. */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_inc_u16, iemAImpl_inc_u32, iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
13261
13262
/** Opcode 0xff /1.
 * DEC Ev: shared unary body macros emit the normal and LOCK-prefixed paths. */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_dec_u16, iemAImpl_dec_u32, iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
13270
13271
13272/**
13273 * Opcode 0xff /2.
13274 * @param bRm The RM byte.
13275 */
13276FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
13277{
13278 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
13279 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13280
13281 if (IEM_IS_MODRM_REG_MODE(bRm))
13282 {
13283 /* The new RIP is taken from a register. */
13284 switch (pVCpu->iem.s.enmEffOpSize)
13285 {
13286 case IEMMODE_16BIT:
13287 IEM_MC_BEGIN(1, 0);
13288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13289 IEM_MC_ARG(uint16_t, u16Target, 0);
13290 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13291 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_16, u16Target);
13292 IEM_MC_END();
13293 break;
13294
13295 case IEMMODE_32BIT:
13296 IEM_MC_BEGIN(1, 0);
13297 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13298 IEM_MC_ARG(uint32_t, u32Target, 0);
13299 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13300 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_32, u32Target);
13301 IEM_MC_END();
13302 break;
13303
13304 case IEMMODE_64BIT:
13305 IEM_MC_BEGIN(1, 0);
13306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13307 IEM_MC_ARG(uint64_t, u64Target, 0);
13308 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13309 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_64, u64Target);
13310 IEM_MC_END();
13311 break;
13312
13313 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13314 }
13315 }
13316 else
13317 {
13318 /* The new RIP is taken from a register. */
13319 switch (pVCpu->iem.s.enmEffOpSize)
13320 {
13321 case IEMMODE_16BIT:
13322 IEM_MC_BEGIN(1, 1);
13323 IEM_MC_ARG(uint16_t, u16Target, 0);
13324 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13325 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13327 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13328 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_16, u16Target);
13329 IEM_MC_END();
13330 break;
13331
13332 case IEMMODE_32BIT:
13333 IEM_MC_BEGIN(1, 1);
13334 IEM_MC_ARG(uint32_t, u32Target, 0);
13335 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13336 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13338 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13339 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_32, u32Target);
13340 IEM_MC_END();
13341 break;
13342
13343 case IEMMODE_64BIT:
13344 IEM_MC_BEGIN(1, 1);
13345 IEM_MC_ARG(uint64_t, u64Target, 0);
13346 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13349 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13350 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_64, u64Target);
13351 IEM_MC_END();
13352 break;
13353
13354 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13355 }
13356 }
13357}
13358
/**
 * Common body for the far indirect CALL/JMP forms (0xff /3 and /5): loads a
 * sel:offset far pointer from memory and defers to @a a_fnCImpl.  Register
 * operands are invalid and raise \#UD.  The offset is fetched first, the
 * 16-bit selector follows immediately after it in memory.
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint16_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            /* offset at +0, selector at +2 (operand-size dependent). */ \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint32_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            /* Only reachable on intel (see the REX.W handling above). */ \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint64_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE /* no gates */, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
13426
13427
13428/**
13429 * Opcode 0xff /3.
13430 * @param bRm The RM byte.
13431 */
13432FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
13433{
13434 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
13435 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf);
13436}
13437
13438
13439/**
13440 * Opcode 0xff /4.
13441 * @param bRm The RM byte.
13442 */
13443FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
13444{
13445 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
13446 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13447
13448 if (IEM_IS_MODRM_REG_MODE(bRm))
13449 {
13450 /* The new RIP is taken from a register. */
13451 switch (pVCpu->iem.s.enmEffOpSize)
13452 {
13453 case IEMMODE_16BIT:
13454 IEM_MC_BEGIN(0, 1);
13455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13456 IEM_MC_LOCAL(uint16_t, u16Target);
13457 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13458 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
13459 IEM_MC_END();
13460 break;
13461
13462 case IEMMODE_32BIT:
13463 IEM_MC_BEGIN(0, 1);
13464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13465 IEM_MC_LOCAL(uint32_t, u32Target);
13466 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13467 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
13468 IEM_MC_END();
13469 break;
13470
13471 case IEMMODE_64BIT:
13472 IEM_MC_BEGIN(0, 1);
13473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13474 IEM_MC_LOCAL(uint64_t, u64Target);
13475 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13476 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
13477 IEM_MC_END();
13478 break;
13479
13480 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13481 }
13482 }
13483 else
13484 {
13485 /* The new RIP is taken from a memory location. */
13486 switch (pVCpu->iem.s.enmEffOpSize)
13487 {
13488 case IEMMODE_16BIT:
13489 IEM_MC_BEGIN(0, 2);
13490 IEM_MC_LOCAL(uint16_t, u16Target);
13491 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13494 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13495 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
13496 IEM_MC_END();
13497 break;
13498
13499 case IEMMODE_32BIT:
13500 IEM_MC_BEGIN(0, 2);
13501 IEM_MC_LOCAL(uint32_t, u32Target);
13502 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13503 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13505 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13506 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
13507 IEM_MC_END();
13508 break;
13509
13510 case IEMMODE_64BIT:
13511 IEM_MC_BEGIN(0, 2);
13512 IEM_MC_LOCAL(uint64_t, u64Target);
13513 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13514 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13516 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13517 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
13518 IEM_MC_END();
13519 break;
13520
13521 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13522 }
13523 }
13524}
13525
13526
13527/**
13528 * Opcode 0xff /5.
13529 * @param bRm The RM byte.
13530 */
13531FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
13532{
13533 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
13534 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp);
13535}
13536
13537
13538/**
13539 * Opcode 0xff /6.
13540 * @param bRm The RM byte.
13541 */
13542FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
13543{
13544 IEMOP_MNEMONIC(push_Ev, "push Ev");
13545
13546 /* Registers are handled by a common worker. */
13547 if (IEM_IS_MODRM_REG_MODE(bRm))
13548 return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
13549
13550 /* Memory we do here. */
13551 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13552 switch (pVCpu->iem.s.enmEffOpSize)
13553 {
13554 case IEMMODE_16BIT:
13555 IEM_MC_BEGIN(0, 2);
13556 IEM_MC_LOCAL(uint16_t, u16Src);
13557 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13558 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13560 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13561 IEM_MC_PUSH_U16(u16Src);
13562 IEM_MC_ADVANCE_RIP_AND_FINISH();
13563 IEM_MC_END();
13564 break;
13565
13566 case IEMMODE_32BIT:
13567 IEM_MC_BEGIN(0, 2);
13568 IEM_MC_LOCAL(uint32_t, u32Src);
13569 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13570 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13572 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13573 IEM_MC_PUSH_U32(u32Src);
13574 IEM_MC_ADVANCE_RIP_AND_FINISH();
13575 IEM_MC_END();
13576 break;
13577
13578 case IEMMODE_64BIT:
13579 IEM_MC_BEGIN(0, 2);
13580 IEM_MC_LOCAL(uint64_t, u64Src);
13581 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13582 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13584 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13585 IEM_MC_PUSH_U64(u64Src);
13586 IEM_MC_ADVANCE_RIP_AND_FINISH();
13587 IEM_MC_END();
13588 break;
13589
13590 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13591 }
13592}
13593
13594
13595/**
13596 * @opcode 0xff
13597 */
13598FNIEMOP_DEF(iemOp_Grp5)
13599{
13600 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13601 switch (IEM_GET_MODRM_REG_8(bRm))
13602 {
13603 case 0:
13604 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
13605 case 1:
13606 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
13607 case 2:
13608 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
13609 case 3:
13610 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
13611 case 4:
13612 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
13613 case 5:
13614 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
13615 case 6:
13616 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
13617 case 7:
13618 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
13619 IEMOP_RAISE_INVALID_OPCODE_RET();
13620 }
13621 AssertFailedReturn(VERR_IEM_IPE_3);
13622}
13623
13624
13625
/**
 * The one byte opcode decoder map, indexed by the opcode byte (0x00..0xff).
 *
 * Each entry is the FNIEMOP decoder/emulator function for that opcode.
 * Escape and group opcodes (e.g. 0x0f two-byte escape, the Grp1-Grp5
 * entries, and the 0xd8..0xdf FPU escapes) point at dispatcher functions
 * that fetch and examine further bytes.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex3, iemOp_lds_Gv_Mp__vex2, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_in_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
13693
13694
13695/** @} */
13696
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette