VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 101189

Last change on this file since 101189 was 100858, checked in by vboxsync, 16 months ago

VMM/IEM: Mark 32-bit pushes and pops as not possible in 64-bit mode. bugref:10369

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 488.5 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 100858 2023-08-11 11:24:13Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
/** The one-byte opcode decoder function table, indexed by opcode byte;
 *  defined later in this translation unit. */
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
/**
 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
 * memory/register as the destination.
 *
 * Decodes ModR/M; for register operands it calls a_fnNormalU8 directly on the
 * referenced GPR, for memory operands (without LOCK) it maps the byte
 * read-write, applies a_fnNormalU8, then commits memory and EFLAGS.
 *
 * Note! This macro deliberately ends inside an open 'else { ... {' (see the
 *       trailing '(void)0'); it MUST be followed by either
 *       IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED,
 *       which supply the LOCK-prefix path and close both scopes.
 */
#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* Deliberately left open: completed by the _NO_LOCK/_LOCKED companion macro. */ \
            (void)0
118
/**
 * Body for instructions like TEST &amp; CMP, ++ with a byte memory/registers as
 * operands.
 *
 * Identical in structure to IEMOP_BODY_BINARY_rm_r8_RW, but the destination
 * is only read (const pointer, RO mapping) since TEST/CMP never write it.
 *
 * Note! Deliberately ends inside an open 'else { ... {' scope; must be
 *       followed by IEMOP_BODY_BINARY_rm_r8_NO_LOCK or
 *       IEMOP_BODY_BINARY_rm_r8_LOCKED to close it.
 */
#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,      0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* Deliberately left open: completed by the _NO_LOCK/_LOCKED companion macro. */ \
            (void)0
178
/**
 * Companion to IEMOP_BODY_BINARY_rm_r8_RW / _RO for instructions that do not
 * allow a LOCK prefix: raises the invalid-lock-prefix exception and closes
 * the two scopes left open by the body macro.
 */
#define IEMOP_BODY_BINARY_rm_r8_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
185
/**
 * Companion to IEMOP_BODY_BINARY_rm_r8_RW for instructions that allow a LOCK
 * prefix: emits the locked memory variant (a_fnLockedU8) and closes the two
 * scopes left open by the body macro.
 *
 * NOTE(review): the unmap-info local is named bMapInfoDst here but bUnmapInfo
 * in the sibling macros - consider unifying the naming.
 */
#define IEMOP_BODY_BINARY_rm_r8_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bMapInfoDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bMapInfoDst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bMapInfoDst); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
208
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination.
 *
 * Since the destination is always a register, the memory path only fetches
 * the source byte and no LOCK handling is needed (lock prefix raises \#UD via
 * IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX).  Self-contained; invoke with a
 * trailing ';'.
 */
#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        IEM_MC_BEGIN(3, 1, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
257
258
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Dispatches on the effective operand size (16/32/64 bit).  Register
 * destinations are modified in place; memory destinations (without LOCK) are
 * mapped read-write and committed together with EFLAGS afterwards.
 *
 * Note! Deliberately ends inside an open 'else { ... {' scope (see the
 *       trailing '(void)0'); it MUST be followed by
 *       IEMOP_BODY_BINARY_rm_rv_LOCKED, which supplies the LOCK-prefix path
 *       and closes both scopes.
 */
#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                /* 32-bit register writes clear bits 63:32 of the destination GPR: */ \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* Deliberately left open: completed by IEMOP_BODY_BINARY_rm_rv_LOCKED. */ \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py.
   This is the LOCK-prefixed memory companion to IEMOP_BODY_BINARY_rm_rv_RW:
   it emits the locked 16/32/64-bit variants and closes the two scopes the
   body macro leaves open. */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
477
/**
 * Body for read-only word/dword/qword instructions like TEST and CMP with
 * memory/register as the destination.
 *
 * Mirrors IEMOP_BODY_BINARY_rm_rv_RW but maps memory read-only (const
 * pointers) and, unlike the _RW variant, is self-contained: the LOCK prefix
 * is rejected inline with \#UD, so no companion macro is needed.
 */
#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst,     0); \
                    IEM_MC_ARG(uint16_t,   u16Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst,     0); \
                    IEM_MC_ARG(uint32_t,   u32Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst,     0); \
                    IEM_MC_ARG(uint64_t,   u64Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* Read-only operations never lock; raise \#UD on a LOCK prefix. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
627
628
/**
 * Body for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * Fetches the Ib immediate and applies a_fnNormalU8 to AL in place.
 * Note! Ends with IEM_MC_END() and no trailing semicolon - invoke as
 *       'IEMOP_BODY_BINARY_AL_Ib(fn);'.
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    \
    IEM_MC_BEGIN(3, 0, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint8_t *,      pu8Dst,             0); \
    IEM_MC_ARG_CONST(uint8_t,  u8Src,/*=*/ u8Imm,  1); \
    IEM_MC_ARG(uint32_t *,     pEFlags,            2); \
    \
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
    \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
648
/**
 * Body for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * Iz is a 16- or 32-bit immediate; in 64-bit mode the 32-bit immediate is
 * sign-extended to 64 bits (IEM_OPCODE_GET_NEXT_S32_SX_U64).
 *
 * @param a_fModifiesDstReg  Non-zero when the operation writes the
 *                           destination register, in which case the 32-bit
 *                           case clears bits 63:32 of RAX.
 *
 * NOTE(review): the cases end without 'break' - presumably
 * IEM_MC_ADVANCE_RIP_AND_FINISH()/IEM_MC_END() terminate the path (e.g. by
 * returning) so fall-through cannot occur; confirm against the MC macro
 * definitions.
 */
#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            \
            IEM_MC_BEGIN(3, 0, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *,      pu16Dst,            0); \
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm, 1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
            \
            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *,      pu32Dst,            0); \
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm, 1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
            \
            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
            \
            if (a_fModifiesDstReg) \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint64_t *,      pu64Dst,            0); \
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm, 1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
            \
            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
715
716
717
718/* Instruction specification format - work in progress: */
719
/**
 * @opcode      0x00
 * @opmnemonic  add
 * @op1         rm:Eb
 * @op2         reg:Gb
 * @opmaps      one
 * @openc       ModR/M
 * @opflmodify  cf,pf,af,zf,sf,of
 * @ophints     harmless ignores_op_sizes
 * @opstats     add_Eb_Gb
 * @opgroup     og_gen_arith_bin
 * @optest              op1=1   op2=1   -> op1=2   efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl|=cf op1=1   op2=2   -> op1=3   efl&|=nc,po,na,nz,pl,nv
 * @optest              op1=254 op2=1   -> op1=255 efl&|=nc,po,na,nz,ng,nv
 * @optest              op1=128 op2=128 -> op1=0   efl&|=ov,pl,zf,na,po,cf
 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* The _RW body leaves two scopes open which the _LOCKED companion closes. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_add_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_add_u8_locked);
}
742
743
/**
 * @opcode      0x01
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @optest               op1=1  op2=1  -> op1=2  efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl|=cf  op1=2  op2=2  -> op1=4  efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl&~=cf op1=-1 op2=1  -> op1=0  efl&|=cf,po,af,zf,pl,nv
 * @optest               op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* The _RW body leaves two scopes open which the _LOCKED companion closes. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
759
760
/**
 * @opcode      0x02
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* ADD r8, r/m8 - register destination, so no LOCK variant. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
}
772
773
/**
 * @opcode      0x03
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Ev_Gv
 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* ADD r16/32/64, r/m - register destination, so no LOCK variant. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1, 0);
}
785
786
/**
 * @opcode      0x04
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* ADD AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
}
798
799
/**
 * @opcode      0x05
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @optest               op1=1  op2=1  -> op1=2  efl&|=nv,pl,nz,na,pe
 * @optest      efl|=cf  op1=2  op2=2  -> op1=4  efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl&~=cf op1=-1 op2=1  -> op1=0  efl&|=cf,po,af,zf,pl,nv
 * @optest               op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* ADD rAX, Iz; final '1' = ADD modifies the destination register. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
814
815
/**
 * @opcode      0x06
 * @opgroup     og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* PUSH ES - invalid in 64-bit mode (IEMOP_HLP_NO_64BIT raises \#UD there). */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
826
827
/**
 * @opcode      0x07
 * @opgroup     og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* POP ES - invalid in 64-bit mode; may change addressability, hence IEM_CIMPL_F_MODE. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
839
840
/**
 * @opcode      0x08
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @optest                  op1=7    op2=12   -> op1=15   efl&|=nc,po,na,nz,pl,nv
 * @optest      efl|=of,cf  op1=0    op2=0    -> op1=0    efl&|=nc,po,na,zf,pl,nv
 * @optest                  op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 * @optest                  op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* The _RW body leaves two scopes open which the _LOCKED companion closes. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_or_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_or_u8_locked);
}
859
860
/**
 * @opcode      0x09
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @optest      efl|=of,cf  op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest      efl|=of,cf  op1=0  op2=0 -> op1=0  efl&|=nc,po,na,zf,pl,nv
 * @optest                  op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o16 / op1=0x5a5a             op2=0xa5a5             -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o32 / op1=0x5a5a5a5a         op2=0xa5a5a5a5         -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* The _RW body leaves two scopes open which the _LOCKED companion closes. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
881
882
/**
 * @opcode      0x0a
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    /* OR r8, r/m8 - register destination, so no LOCK variant. */
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
}
897
898
/**
 * @opcode      0x0b
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @opcopytests iemOp_or_Ev_Gv
 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* OR r16/32/64, r/m - register destination, so no LOCK variant. */
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1, 0);
}
913
914
/**
 * @opcode      0x0c
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* OR AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
}
929
930
/**
 * @opcode      0x0d
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @optest      efl|=of,cf  op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest      efl|=of,cf  op1=0  op2=0 -> op1=0  efl&|=nc,po,na,zf,pl,nv
 * @optest                  op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o16 / op1=0x5a5a             op2=0xa5a5     -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o32 / op1=0x5a5a5a5a         op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX, Iz; final '1' = OR modifies the destination register. */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
951
952
953/**
954 * @opcode 0x0e
955 * @opgroup og_stack_sreg
956 */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* #UD in 64-bit mode. */
    /* Shares the generic segment-register push worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
963
964
965/**
966 * @opcode 0x0f
967 * @opmnemonic EscTwo0f
968 * @openc two0f
969 * @opdisenum OP_2B_ESC
970 * @ophints harmless
971 * @opgroup og_escapes
972 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        /* Two-byte map has 4 entries per opcode, selected by the active prefix index. */
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
1008
1009/**
1010 * @opcode 0x10
1011 * @opgroup og_gen_arith_bin
1012 * @opfltest cf
1013 * @opflmodify cf,pf,af,zf,sf,of
1014 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1015 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1016 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1017 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1018 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1019 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Eb destination may be memory: emit both the plain and LOCK-prefixed bodies. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_adc_u8_locked);
}
1026
1027
1028/**
1029 * @opcode 0x11
1030 * @opgroup og_gen_arith_bin
1031 * @opfltest cf
1032 * @opflmodify cf,pf,af,zf,sf,of
1033 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1034 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1035 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1036 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1037 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1038 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Ev destination may be memory: emit both the plain and LOCK-prefixed bodies. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1045
1046
1047/**
1048 * @opcode 0x12
1049 * @opgroup og_gen_arith_bin
1050 * @opfltest cf
1051 * @opflmodify cf,pf,af,zf,sf,of
1052 * @opcopytests iemOp_adc_Eb_Gb
1053 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* Register destination; no LOCK variant. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
1059
1060
1061/**
1062 * @opcode 0x13
1063 * @opgroup og_gen_arith_bin
1064 * @opfltest cf
1065 * @opflmodify cf,pf,af,zf,sf,of
1066 * @opcopytests iemOp_adc_Ev_Gv
1067 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* Register destination; no LOCK variant. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1, 0);
}
1073
1074
1075/**
1076 * @opcode 0x14
1077 * @opgroup og_gen_arith_bin
1078 * @opfltest cf
1079 * @opflmodify cf,pf,af,zf,sf,of
1080 * @opcopytests iemOp_adc_Eb_Gb
1081 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* Fixed AL destination with byte immediate. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
1087
1088
1089/**
1090 * @opcode 0x15
1091 * @opgroup og_gen_arith_bin
1092 * @opfltest cf
1093 * @opflmodify cf,pf,af,zf,sf,of
1094 * @opcopytests iemOp_adc_Ev_Gv
1095 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* Fixed rAX destination with size-dependent immediate (Iz). */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1101
1102
1103/**
1104 * @opcode 0x16
1105 */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT(); /* #UD in 64-bit mode. */
    /* Shares the generic segment-register push worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1112
1113
1114/**
1115 * @opcode 0x17
1116 * @opgroup og_gen_arith_bin
1117 * @opfltest cf
1118 * @opflmodify cf,pf,af,zf,sf,of
1119 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* #UD in 64-bit mode. */
    /* Loading SS can change execution mode state, hence IEM_CIMPL_F_MODE on the deferral. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1127
1128
1129/**
1130 * @opcode 0x18
1131 * @opgroup og_gen_arith_bin
1132 * @opfltest cf
1133 * @opflmodify cf,pf,af,zf,sf,of
1134 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Eb destination may be memory: emit both the plain and LOCK-prefixed bodies. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sbb_u8_locked);
}
1141
1142
1143/**
1144 * @opcode 0x19
1145 * @opgroup og_gen_arith_bin
1146 * @opfltest cf
1147 * @opflmodify cf,pf,af,zf,sf,of
1148 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Ev destination may be memory: emit both the plain and LOCK-prefixed bodies. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1155
1156
1157/**
1158 * @opcode 0x1a
1159 * @opgroup og_gen_arith_bin
1160 * @opfltest cf
1161 * @opflmodify cf,pf,af,zf,sf,of
1162 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* Register destination; no LOCK variant. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
1168
1169
1170/**
1171 * @opcode 0x1b
1172 * @opgroup og_gen_arith_bin
1173 * @opfltest cf
1174 * @opflmodify cf,pf,af,zf,sf,of
1175 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* Register destination; no LOCK variant. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1, 0);
}
1181
1182
1183/**
1184 * @opcode 0x1c
1185 * @opgroup og_gen_arith_bin
1186 * @opfltest cf
1187 * @opflmodify cf,pf,af,zf,sf,of
1188 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* Fixed AL destination with byte immediate. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
1194
1195
1196/**
1197 * @opcode 0x1d
1198 * @opgroup og_gen_arith_bin
1199 * @opfltest cf
1200 * @opflmodify cf,pf,af,zf,sf,of
1201 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* Fixed rAX destination with size-dependent immediate (Iz). */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1207
1208
1209/**
1210 * @opcode 0x1e
1211 * @opgroup og_stack_sreg
1212 */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* #UD in 64-bit mode. */
    /* Shares the generic segment-register push worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1219
1220
1221/**
1222 * @opcode 0x1f
1223 * @opgroup og_stack_sreg
1224 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* #UD in 64-bit mode. */
    /* Segment-register load can change mode state, hence IEM_CIMPL_F_MODE on the deferral. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1232
1233
1234/**
1235 * @opcode 0x20
1236 * @opgroup og_gen_arith_bin
1237 * @opflmodify cf,pf,af,zf,sf,of
1238 * @opflundef af
1239 * @opflclear of,cf
1240 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND. */
    /* Eb destination may be memory: emit both the plain and LOCK-prefixed bodies. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_and_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_and_u8_locked);
}
1248
1249
1250/**
1251 * @opcode 0x21
1252 * @opgroup og_gen_arith_bin
1253 * @opflmodify cf,pf,af,zf,sf,of
1254 * @opflundef af
1255 * @opflclear of,cf
1256 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND. */
    /* Ev destination may be memory: emit both the plain and LOCK-prefixed bodies. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1264
1265
1266/**
1267 * @opcode 0x22
1268 * @opgroup og_gen_arith_bin
1269 * @opflmodify cf,pf,af,zf,sf,of
1270 * @opflundef af
1271 * @opflclear of,cf
1272 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* Register destination; no LOCK variant. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1279
1280
1281/**
1282 * @opcode 0x23
1283 * @opgroup og_gen_arith_bin
1284 * @opflmodify cf,pf,af,zf,sf,of
1285 * @opflundef af
1286 * @opflclear of,cf
1287 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* Register destination; no LOCK variant. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1, 0);
}
1294
1295
1296/**
1297 * @opcode 0x24
1298 * @opgroup og_gen_arith_bin
1299 * @opflmodify cf,pf,af,zf,sf,of
1300 * @opflundef af
1301 * @opflclear of,cf
1302 */
1303FNIEMOP_DEF(iemOp_and_Al_Ib)
1304{
1305 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1306 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1307 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1308}
1309
1310
1311/**
1312 * @opcode 0x25
1313 * @opgroup og_gen_arith_bin
1314 * @opflmodify cf,pf,af,zf,sf,of
1315 * @opflundef af
1316 * @opflclear of,cf
1317 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* Fixed rAX destination with size-dependent immediate (Iz). */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1324
1325
1326/**
1327 * @opcode 0x26
1328 * @opmnemonic SEG
1329 * @op1 ES
1330 * @opgroup og_prefix
1331 * @openc prefix
1332 * @opdisenum OP_SEG
1333 * @ophints harmless
1334 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    /* Record the ES segment-override prefix and restart decoding on the next byte. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg    = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1344
1345
1346/**
1347 * @opcode 0x27
1348 * @opfltest af,cf
1349 * @opflmodify cf,pf,af,zf,sf,of
1350 * @opflundef of
1351 */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT(); /* #UD in 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after DAA. */
    /* Defer to the C implementation; it only touches status flags. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_daa);
}
1360
1361
1362/**
1363 * @opcode 0x28
1364 * @opgroup og_gen_arith_bin
1365 * @opflmodify cf,pf,af,zf,sf,of
1366 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Eb destination may be memory: emit both the plain and LOCK-prefixed bodies. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sub_u8_locked);
}
1373
1374
1375/**
1376 * @opcode 0x29
1377 * @opgroup og_gen_arith_bin
1378 * @opflmodify cf,pf,af,zf,sf,of
1379 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Ev destination may be memory: emit both the plain and LOCK-prefixed bodies. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1386
1387
1388/**
1389 * @opcode 0x2a
1390 * @opgroup og_gen_arith_bin
1391 * @opflmodify cf,pf,af,zf,sf,of
1392 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* Register destination; no LOCK variant. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1398
1399
1400/**
1401 * @opcode 0x2b
1402 * @opgroup og_gen_arith_bin
1403 * @opflmodify cf,pf,af,zf,sf,of
1404 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* Register destination; no LOCK variant. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1, 0);
}
1410
1411
1412/**
1413 * @opcode 0x2c
1414 * @opgroup og_gen_arith_bin
1415 * @opflmodify cf,pf,af,zf,sf,of
1416 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* Fixed AL destination with byte immediate. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1422
1423
1424/**
1425 * @opcode 0x2d
1426 * @opgroup og_gen_arith_bin
1427 * @opflmodify cf,pf,af,zf,sf,of
1428 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* Fixed rAX destination with size-dependent immediate (Iz). */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1434
1435
1436/**
1437 * @opcode 0x2e
1438 * @opmnemonic SEG
1439 * @op1 CS
1440 * @opgroup og_prefix
1441 * @openc prefix
1442 * @opdisenum OP_SEG
1443 * @ophints harmless
1444 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    /* Record the CS segment-override prefix and restart decoding on the next byte. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1454
1455
1456/**
1457 * @opcode 0x2f
1458 * @opfltest af,cf
1459 * @opflmodify cf,pf,af,zf,sf,of
1460 * @opflundef of
1461 */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT(); /* #UD in 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after DAS. */
    /* Defer to the C implementation; it only touches status flags. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_das);
}
1470
1471
1472/**
1473 * @opcode 0x30
1474 * @opgroup og_gen_arith_bin
1475 * @opflmodify cf,pf,af,zf,sf,of
1476 * @opflundef af
1477 * @opflclear of,cf
1478 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR. */
    /* Eb destination may be memory: emit both the plain and LOCK-prefixed bodies. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_xor_u8_locked);
}
1486
1487
1488/**
1489 * @opcode 0x31
1490 * @opgroup og_gen_arith_bin
1491 * @opflmodify cf,pf,af,zf,sf,of
1492 * @opflundef af
1493 * @opflclear of,cf
1494 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR. */
    /* Ev destination may be memory: emit both the plain and LOCK-prefixed bodies. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1502
1503
1504/**
1505 * @opcode 0x32
1506 * @opgroup og_gen_arith_bin
1507 * @opflmodify cf,pf,af,zf,sf,of
1508 * @opflundef af
1509 * @opflclear of,cf
1510 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* Register destination; no LOCK variant. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1517
1518
1519/**
1520 * @opcode 0x33
1521 * @opgroup og_gen_arith_bin
1522 * @opflmodify cf,pf,af,zf,sf,of
1523 * @opflundef af
1524 * @opflclear of,cf
1525 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* Register destination; no LOCK variant. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1, 0);
}
1532
1533
1534/**
1535 * @opcode 0x34
1536 * @opgroup og_gen_arith_bin
1537 * @opflmodify cf,pf,af,zf,sf,of
1538 * @opflundef af
1539 * @opflclear of,cf
1540 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* Fixed AL destination with byte immediate. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1547
1548
1549/**
1550 * @opcode 0x35
1551 * @opgroup og_gen_arith_bin
1552 * @opflmodify cf,pf,af,zf,sf,of
1553 * @opflundef af
1554 * @opflclear of,cf
1555 */
1556FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1557{
1558 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1559 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1560 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1561}
1562
1563
1564/**
1565 * @opcode 0x36
1566 * @opmnemonic SEG
1567 * @op1 SS
1568 * @opgroup og_prefix
1569 * @openc prefix
1570 * @opdisenum OP_SEG
1571 * @ophints harmless
1572 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    /* Record the SS segment-override prefix and restart decoding on the next byte. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1582
1583
1584/**
1585 * @opcode 0x37
1586 * @opfltest af,cf
1587 * @opflmodify cf,pf,af,zf,sf,of
1588 * @opflundef pf,zf,sf,of
1589 * @opgroup og_gen_arith_dec
1590 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1591 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1592 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1593 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1594 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1595 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1596 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1597 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1598 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1599 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1600 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1601 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1602 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1603 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1604 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1605 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1606 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1607 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1608 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1609 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1610 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1611 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1612 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1613 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1614 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1615 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1616 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1617 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1618 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1619 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1620 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1621 */
FNIEMOP_DEF(iemOp_aaa)
{
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT(); /* #UD in 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after AAA. */

    /* Defer to the C implementation; it only touches status flags. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aaa);
}
1631
1632
1633/**
1634 * @opcode 0x38
1635 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    /* CMP only reads the destination (RO body), so no LOCK variant is valid. */
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
1642
1643
1644/**
1645 * @opcode 0x39
1646 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    /* CMP only reads the destination (RO body); no LOCK variant. */
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
1652
1653
1654/**
1655 * @opcode 0x3a
1656 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    /* Register "destination" (only read by CMP); no LOCK variant. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1662
1663
1664/**
1665 * @opcode 0x3b
1666 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    /* Note: last-but-one arg is 0 here (vs 1 for or/adc/etc) - CMP discards the result. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0, 0);
}
1672
1673
1674/**
1675 * @opcode 0x3c
1676 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    /* Fixed AL operand with byte immediate. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1682
1683
1684/**
1685 * @opcode 0x3d
1686 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    /* Note: last arg is 0 here (vs 1 for or/adc/etc) - CMP discards the result. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1692
1693
1694/**
1695 * @opcode 0x3e
1696 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    /* Record the DS segment-override prefix and restart decoding on the next byte. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1706
1707
1708/**
1709 * @opcode 0x3f
1710 * @opfltest af,cf
1711 * @opflmodify cf,pf,af,zf,sf,of
1712 * @opflundef pf,zf,sf,of
1713 * @opgroup og_gen_arith_dec
1714 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1715 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1716 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1717 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1718 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1719 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1720 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1721 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1722 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1723 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1724 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1725 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1726 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1727 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1728 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1729 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1730 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1731 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1732 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1733 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1734 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1735 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1736 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1737 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1738 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1739 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1740 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1741 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1742 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1743 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1744 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1745 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1746 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1747 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1748 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1749 */
1750FNIEMOP_DEF(iemOp_aas)
1751{
1752 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1753 IEMOP_HLP_NO_64BIT();
1754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1755 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1756
1757 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aas);
1758}
1759
1760
1761/**
1762 * Common 'inc/dec register' helper.
1763 *
1764 * Not for 64-bit code, only for what became the rex prefixes.
1765 */
1766#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
1767 switch (pVCpu->iem.s.enmEffOpSize) \
1768 { \
1769 case IEMMODE_16BIT: \
1770 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_64BIT); \
1771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1772 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
1773 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1774 IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
1775 IEM_MC_REF_EFLAGS(pEFlags); \
1776 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
1777 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1778 IEM_MC_END(); \
1779 break; \
1780 \
1781 case IEMMODE_32BIT: \
1782 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); \
1783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1784 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
1785 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1786 IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
1787 IEM_MC_REF_EFLAGS(pEFlags); \
1788 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
1789 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
1790 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1791 IEM_MC_END(); \
1792 break; \
1793 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1794 } \
1795 (void)0
1796
1797/**
1798 * @opcode 0x40
1799 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Bare REX (no R/X/B/W bits); record it and continue with the next opcode byte. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1817
1818
1819/**
1820 * @opcode 0x41
1821 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.B; record the bit and continue with the next opcode byte. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1840
1841
1842/**
1843 * @opcode 0x42
1844 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.X; record the bit and continue with the next opcode byte. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1863
1864
1865
1866/**
1867 * @opcode 0x43
1868 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.BX; record both bits and continue with the next opcode byte. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1888
1889
1890/**
1891 * @opcode 0x44
1892 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.R; record the bit and continue with the next opcode byte. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1911
1912
1913/**
1914 * @opcode 0x45
1915 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RB; record both bits and continue with the next opcode byte. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1935
1936
1937/**
1938 * @opcode 0x46
1939 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RX; record both bits and continue with the next opcode byte. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
1959
1960
1961/**
1962 * @opcode 0x47
1963 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RXB; record all three bits and continue with the next opcode byte. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
1984
1985
1986/**
1987 * @opcode 0x48
1988 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * In 64-bit mode 0x40..0x4f are REX prefixes, so 0x48 is REX.W: record
     * the prefix flags, re-derive the effective operand size (REX.W selects
     * 64-bit), then decode the next byte as the actual opcode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is the one-byte 'dec eAX' instruction. */
    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
}
2007
2008
2009/**
2010 * @opcode 0x49
2011 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * In 64-bit mode 0x40..0x4f are REX prefixes, so 0x49 is REX.BW: record
     * the prefix flags and the base extension bit, re-derive the effective
     * operand size (REX.W), then decode the next byte as the actual opcode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB      = 1 << 3;   /* MODRM.rm / SIB.base gains bit 3. */
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is the one-byte 'dec eCX' instruction. */
    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
}
2031
2032
2033/**
2034 * @opcode 0x4a
2035 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * In 64-bit mode 0x40..0x4f are REX prefixes, so 0x4a is REX.XW: record
     * the prefix flags and the index extension bit, re-derive the effective
     * operand size (REX.W), then decode the next byte as the actual opcode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex  = 1 << 3;   /* SIB.index gains bit 3. */
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is the one-byte 'dec eDX' instruction. */
    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
}
2055
2056
2057/**
2058 * @opcode 0x4b
2059 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * In 64-bit mode 0x40..0x4f are REX prefixes, so 0x4b is REX.BXW: record
     * the prefix flags and the base+index extension bits, re-derive the
     * effective operand size (REX.W), then decode the next byte as the actual
     * opcode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB      = 1 << 3;   /* MODRM.rm / SIB.base gains bit 3. */
        pVCpu->iem.s.uRexIndex  = 1 << 3;   /* SIB.index gains bit 3. */
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is the one-byte 'dec eBX' instruction. */
    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
}
2080
2081
2082/**
2083 * @opcode 0x4c
2084 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * In 64-bit mode 0x40..0x4f are REX prefixes, so 0x4c is REX.RW: record
     * the prefix flags and the reg extension bit, re-derive the effective
     * operand size (REX.W), then decode the next byte as the actual opcode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg    = 1 << 3;   /* MODRM.reg gains bit 3 (r8..r15). */
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is the one-byte 'dec eSP' instruction. */
    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
}
2104
2105
2106/**
2107 * @opcode 0x4d
2108 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * In 64-bit mode 0x40..0x4f are REX prefixes, so 0x4d is REX.RBW: record
     * the prefix flags and the reg+base extension bits, re-derive the
     * effective operand size (REX.W), then decode the next byte as the actual
     * opcode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg    = 1 << 3;   /* MODRM.reg gains bit 3 (r8..r15). */
        pVCpu->iem.s.uRexB      = 1 << 3;   /* MODRM.rm / SIB.base gains bit 3. */
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is the one-byte 'dec eBP' instruction. */
    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
}
2129
2130
2131/**
2132 * @opcode 0x4e
2133 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * In 64-bit mode 0x40..0x4f are REX prefixes, so 0x4e is REX.RXW: record
     * the prefix flags and the reg+index extension bits, re-derive the
     * effective operand size (REX.W), then decode the next byte as the actual
     * opcode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg    = 1 << 3;   /* MODRM.reg gains bit 3 (r8..r15). */
        pVCpu->iem.s.uRexIndex  = 1 << 3;   /* SIB.index gains bit 3. */
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is the one-byte 'dec eSI' instruction. */
    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
}
2154
2155
2156/**
2157 * @opcode 0x4f
2158 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * In 64-bit mode 0x40..0x4f are REX prefixes, so 0x4f is REX.RBXW (all
     * four extension bits): record the prefix flags and extension bits,
     * re-derive the effective operand size (REX.W), then decode the next byte
     * as the actual opcode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg    = 1 << 3;   /* MODRM.reg gains bit 3 (r8..r15). */
        pVCpu->iem.s.uRexB      = 1 << 3;   /* MODRM.rm / SIB.base gains bit 3. */
        pVCpu->iem.s.uRexIndex  = 1 << 3;   /* SIB.index gains bit 3. */
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is the one-byte 'dec eDI' instruction. */
    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
}
2180
2181
2182/**
2183 * Common 'push register' helper.
2184 */
/**
 * Common 'push register' worker for opcodes 0x50..0x57.
 *
 * @param   iReg    The general register index (X86_GREG_xXX); extended with
 *                  REX.B in 64-bit mode below.
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB;         /* REX.B extends the register to r8..r15. */
        /* PUSH defaults to 64-bit operand size in long mode; a 0x66 prefix
           selects 16-bit.  There is no 32-bit push in 64-bit mode. */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            /* 32-bit pushes are impossible in 64-bit mode (see above). */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2229
2230
2231/**
2232 * @opcode 0x50
2233 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    /* Common worker handles REX.B extension and operand-size selection. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
2239
2240
2241/**
2242 * @opcode 0x51
2243 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    /* Common worker handles REX.B extension and operand-size selection. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
2249
2250
2251/**
2252 * @opcode 0x52
2253 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    /* Common worker handles REX.B extension and operand-size selection. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
2259
2260
2261/**
2262 * @opcode 0x53
2263 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    /* Common worker handles REX.B extension and operand-size selection. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
2269
2270
2271/**
2272 * @opcode 0x54
2273 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    /*
     * The 8086/8088 decrements SP before storing it, so it pushes SP-2 rather
     * than the original SP value like later CPUs do.  Emulate that quirk here
     * when targeting an 8086.
     */
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        IEM_MC_BEGIN(0, 1, IEM_MC_F_ONLY_8086);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);  /* 8086 pushes the decremented SP. */
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* All other CPUs: behave like the regular push-register opcodes. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
2290
2291
2292/**
2293 * @opcode 0x55
2294 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    /* Common worker handles REX.B extension and operand-size selection. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
2300
2301
2302/**
2303 * @opcode 0x56
2304 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    /* Common worker handles REX.B extension and operand-size selection. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
2310
2311
2312/**
2313 * @opcode 0x57
2314 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    /* Common worker handles REX.B extension and operand-size selection. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
2320
2321
2322/**
2323 * Common 'pop register' helper.
2324 */
/**
 * Common 'pop register' worker for opcodes 0x58..0x5f.
 *
 * @param   iReg    The general register index (X86_GREG_xXX); extended with
 *                  REX.B in 64-bit mode below.
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB;         /* REX.B extends the register to r8..r15. */
        /* POP defaults to 64-bit operand size in long mode; a 0x66 prefix
           selects 16-bit.  There is no 32-bit pop in 64-bit mode. */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            /* 32-bit pops are impossible in 64-bit mode (see above). */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2370
2371
2372/**
2373 * @opcode 0x58
2374 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    /* Common worker handles REX.B extension and operand-size selection. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
2380
2381
2382/**
2383 * @opcode 0x59
2384 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    /* Common worker handles REX.B extension and operand-size selection. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
2390
2391
2392/**
2393 * @opcode 0x5a
2394 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    /* Common worker handles REX.B extension and operand-size selection. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
2400
2401
2402/**
2403 * @opcode 0x5b
2404 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    /* Common worker handles REX.B extension and operand-size selection. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
2410
2411
2412/**
2413 * @opcode 0x5c
2414 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* With REX.B this is 'pop r12' and needs no special SP handling;
           the common worker ORs in uRexB itself. */
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        /* POP defaults to 64-bit operand size in long mode; a 0x66 prefix
           selects 16-bit.  There is no 32-bit pop in 64-bit mode. */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* 'pop rSP' is special because the destination is the stack pointer
       itself, so it cannot go through the by-reference common worker. */
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2465
2466
2467/**
2468 * @opcode 0x5d
2469 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    /* Common worker handles REX.B extension and operand-size selection. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
2475
2476
2477/**
2478 * @opcode 0x5e
2479 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    /* Common worker handles REX.B extension and operand-size selection. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
2485
2486
2487/**
2488 * @opcode 0x5f
2489 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    /* Common worker handles REX.B extension and operand-size selection. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
2495
2496
2497/**
2498 * @opcode 0x60
2499 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();    /* PUSHA first appeared on the 80186. */
    IEMOP_HLP_NO_64BIT();   /* Opcode 0x60 is redefined in 64-bit mode. */
    /* Defer to the C implementation matching the effective operand size;
       only 16 and 32-bit are possible here (no 64-bit mode). */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_32);
}
2510
2511
2512/**
2513 * @opcode 0x61
2514 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    /* Outside 64-bit mode opcode 0x61 is POPA; in 64-bit mode it is the
       (unsupported) MVEX prefix of Xeon Phi / KNC. */
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();    /* POPA first appeared on the 80186. */
        IEMOP_HLP_NO_64BIT();
        /* Defer to the C implementation matching the effective operand size. */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
2531
2532
2533/**
2534 * @opcode 0x62
2535 * @opmnemonic bound
2536 * @op1 Gv_RO
2537 * @op2 Ma
2538 * @opmincpu 80186
2539 * @ophints harmless x86_invalid_64
2540 * @optest op1=0 op2=0 ->
2541 * @optest op1=1 op2=0 -> value.xcpt=5
2542 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2543 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2544 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2545 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2546 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2547 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2548 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2549 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2550 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2551 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2552 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2553 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2554 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2555 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2556 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2557 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2558 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2559 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2560 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2561 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2562 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2563 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2564 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2565 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2566 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2567 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2568 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2569 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2570 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2571 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2572 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2573 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2574 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2575 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2576 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2577 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2578 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2579 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2580 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2581 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2582 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2583 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2584 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set.  In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X.  Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();    /* BOUND first appeared on the 80186. */
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /* MOD != 3: this really is BOUND.  Fetch the index register and
               the lower/upper bounds pair from memory and let the C worker
               raise #BR if the index is out of range. */
            /** @todo testcase: check that there are two memory accesses involved.  Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        /* MOD == 3 in legacy/compat mode: EVEX prefix candidate; invalid
           opcode unless the guest CPU has AVX-512 foundation. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX prefix decoding: consume the remaining two payload bytes, then
       bail out since EVEX instructions are not implemented yet. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2672
2673
2674/** Opcode 0x63 - non-64-bit modes. */
/** Opcode 0x63 - non-64-bit modes.
 *
 * ARPL Ew,Gw: adjusts the RPL field of the destination selector word; the
 * actual RPL/ZF logic lives in iemAImpl_arpl.  Requires protected mode
 * (no real or V86 mode) and a 286 or later.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: reference it directly and call the worker. */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map it read-write, run the worker, then commit
           the result and the flags. */
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2722
2723
2724/**
2725 * @opcode 0x63
2726 *
2727 * @note This is a weird one. It works like a regular move instruction if
2728 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2729 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register: sign-extend the 32-bit source register
             * into the 64-bit destination register.
             */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory: sign-extend the 32-bit
             * memory operand into the 64-bit destination register.
             */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        /* Without REX.W this should behave like a plain 32-bit move (see the
           note in the function doxygen) -- not implemented yet. */
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
2771
2772
2773/**
2774 * @opcode 0x64
2775 * @opmnemonic segfs
2776 * @opmincpu 80386
2777 * @opgroup og_prefixes
2778 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    /* FS segment-override prefix: record it, make FS the effective segment,
       then decode the next byte as the actual opcode. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();    /* FS first appeared on the 80386. */

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2790
2791
2792/**
2793 * @opcode 0x65
2794 * @opmnemonic seggs
2795 * @opmincpu 80386
2796 * @opgroup og_prefixes
2797 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    /* GS segment-override prefix: record it, make GS the effective segment,
       then decode the next byte as the actual opcode. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();    /* GS first appeared on the 80386. */

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2809
2810
2811/**
2812 * @opcode 0x66
2813 * @opmnemonic opsize
2814 * @openc prefix
2815 * @opmincpu 80386
2816 * @ophints harmless
2817 * @opgroup og_prefixes
2818 */
FNIEMOP_DEF(iemOp_op_size)
{
    /* Operand-size override prefix (0x66): record it, re-derive the effective
       operand size, then decode the next byte as the actual opcode. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2835
2836
2837/**
2838 * @opcode 0x67
2839 * @opmnemonic addrsize
2840 * @openc prefix
2841 * @opmincpu 80386
2842 * @ophints harmless
2843 * @opgroup og_prefixes
2844 */
FNIEMOP_DEF(iemOp_addr_size)
{
    /* Address-size override prefix (0x67): record it, toggle the effective
       address mode relative to the default (16<->32; 64->32), then decode the
       next byte as the actual opcode. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2862
2863
2864/**
2865 * @opcode 0x68
2866 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();                /* PUSH imm first appeared on the 80186. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Defaults to 64-bit operand size in long mode. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186);
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            /* 32-bit pushes are impossible in 64-bit mode. */
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT);
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            /* The immediate stays 32 bits and is sign-extended to 64. */
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT);
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2904
2905
2906/**
2907 * @opcode 0x69
2908 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();    /* Three-operand IMUL first appeared on the 80186. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* SF/ZF/AF/PF are architecturally undefined after IMUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            /* Pick the EFLAGS-behavior specific worker for the target CPU. */
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2-byte immediate follows the modr/m bytes. */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            /* Pick the EFLAGS-behavior specific worker for the target CPU. */
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4-byte immediate follows the modr/m bytes. */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            /* Pick the EFLAGS-behavior specific worker for the target CPU. */
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand; the immediate stays 32 bits, sign-extended to 64. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4-byte immediate follows the modr/m bytes. */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
                IEM_MC_ASSIGN_U32_SX_U64(u64Src, u32Imm); /* parameter count for the threaded function for this block. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3068
3069
3070/**
3071 * @opcode 0x6a
3072 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    /*
     * PUSH Ib (0x6a): push sign-extended byte immediate.
     * The immediate is fetched as a signed 8-bit value and sign-extended to
     * the effective operand size by the IEM_MC_PUSH_U16/U32/U64 operation.
     * In 64-bit mode the default operand size is 64-bit (no 32-bit push).
     */
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();                            /* Not valid on 8086/80186-earlier? No: min 186. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U16(i8Imm);                 /* i8Imm sign-extends to 16 bits here. */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_32BIT:
            /* 32-bit pushes are not possible in 64-bit mode. */
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U32(i8Imm);                 /* i8Imm sign-extends to 32 bits here. */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U64(i8Imm);                 /* i8Imm sign-extends to 64 bits here. */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3106
3107
3108/**
3109 * @opcode 0x6b
3110 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    /*
     * IMUL Gv,Ev,Ib (0x6b): three-operand signed multiply with a
     * sign-extended byte immediate.  The Ev operand (register or memory) is
     * multiplied by the sign-extended Ib and the truncated result is stored
     * in Gv.  SF/ZF/AF/PF are undefined after IMUL, hence the verification
     * exemption below; the worker selected per target CPU decides how the
     * undefined flags actually come out.
     */
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1); /* cast sign-extends Ib to 16 bits */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                /* Work on a local copy so source (RM) and destination (REG) may differ. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to come */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1); /* cast sign-extends Ib to 32 bits */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to come */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1); /* cast sign-extends Ib to 64 bits */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to come */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the */
                IEM_MC_ASSIGN_U8_SX_U64(u64Src, u8Imm); /* parameter count for the threaded function for this block. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3270
3271
3272/**
3273 * @opcode 0x6c
3274 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    /*
     * INSB (0x6c): input byte(s) from port DX to ES:[e/r]DI.
     * Entirely deferred to a C implementation (I/O + possible VM-exit);
     * the variant is picked by REP prefix presence and effective address
     * size.  REP and REPNE are treated the same here.
     */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                            iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                            iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                            iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                            iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                            iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                            iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3308
3309
3310/**
3311 * @opcode 0x6d
3312 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    /*
     * INSW/INSD (0x6d): input word/dword(s) from port DX to ES:[e/r]DI.
     * Deferred to a C implementation picked by REP prefix, effective
     * operand size and effective address size.  A 64-bit operand size
     * falls through to the 32-bit (op32) workers, matching the fact that
     * there is no 64-bit port I/O operand.
     */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru: 64-bit op size uses the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                                    iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                                    iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                                    iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru: 64-bit op size uses the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                                    iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                                    iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                                    iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3390
3391
3392/**
3393 * @opcode 0x6e
3394 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    /*
     * OUTSB (0x6e): output byte(s) from seg:[e/r]SI to port DX.
     * Deferred to a C implementation; unlike INS, the source segment is
     * overridable, so the effective segment index is passed along.
     * REP and REPNE are treated the same here.
     */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3434
3435
3436/**
3437 * @opcode 0x6f
3438 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    /*
     * OUTSW/OUTSD (0x6f): output word/dword(s) from seg:[e/r]SI to port DX.
     * Deferred to a C implementation picked by REP prefix, effective
     * operand size and effective address size; the source segment index is
     * passed since OUTS honours segment overrides.  A 64-bit operand size
     * falls through to the 32-bit (op32) workers.
     */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru: 64-bit op size uses the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru: 64-bit op size uses the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3522
3523
3524/**
3525 * @opcode 0x70
3526 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    /* JO rel8: jump if EFLAGS.OF is set; i8Imm is the signed 8-bit
       displacement relative to the next instruction. */
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3542
3543
3544/**
3545 * @opcode 0x71
3546 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    /* JNO rel8: jump if EFLAGS.OF is clear (branches are inverted vs JO:
       the taken path is in the else-arm). */
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3562
3563/**
3564 * @opcode 0x72
3565 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    /* JC/JB/JNAE rel8: jump if EFLAGS.CF is set. */
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3581
3582
3583/**
3584 * @opcode 0x73
3585 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    /* JNC/JNB/JAE rel8: jump if EFLAGS.CF is clear (taken path in else-arm). */
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3601
3602
3603/**
3604 * @opcode 0x74
3605 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    /* JE/JZ rel8: jump if EFLAGS.ZF is set. */
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3621
3622
3623/**
3624 * @opcode 0x75
3625 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    /* JNE/JNZ rel8: jump if EFLAGS.ZF is clear (taken path in else-arm). */
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3641
3642
3643/**
3644 * @opcode 0x76
3645 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    /* JBE/JNA rel8: jump if CF or ZF is set (unsigned below-or-equal). */
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3661
3662
3663/**
3664 * @opcode 0x77
3665 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    /* JA/JNBE rel8: jump if both CF and ZF are clear (unsigned above);
       taken path in the else-arm. */
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3681
3682
3683/**
3684 * @opcode 0x78
3685 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    /* JS rel8: jump if EFLAGS.SF is set. */
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3701
3702
3703/**
3704 * @opcode 0x79
3705 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    /* JNS rel8: jump if EFLAGS.SF is clear (taken path in else-arm). */
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3721
3722
3723/**
3724 * @opcode 0x7a
3725 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    /* JP/JPE rel8: jump if EFLAGS.PF is set. */
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3741
3742
3743/**
3744 * @opcode 0x7b
3745 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    /* JNP/JPO rel8: jump if EFLAGS.PF is clear (taken path in else-arm). */
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3761
3762
3763/**
3764 * @opcode 0x7c
3765 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    /* JL/JNGE rel8: jump if SF != OF (signed less). */
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3781
3782
3783/**
3784 * @opcode 0x7d
3785 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    /* JNL/JGE rel8: jump if SF == OF (signed greater-or-equal); taken path
       in the else-arm. */
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3801
3802
3803/**
3804 * @opcode 0x7e
3805 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    /* JLE/JNG rel8: jump if ZF is set or SF != OF (signed less-or-equal). */
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3821
3822
3823/**
3824 * @opcode 0x7f
3825 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    /* JG/JNLE rel8: jump if ZF is clear and SF == OF (signed greater);
       taken path in the else-arm. */
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3841
3842
3843/**
3844 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
3845 * iemOp_Grp1_Eb_Ib_80.
3846 */
3847#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
3848 if (IEM_IS_MODRM_REG_MODE(bRm)) \
3849 { \
3850 /* register target */ \
3851 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3852 IEM_MC_BEGIN(3, 0, 0); \
3853 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
3854 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3855 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3856 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
3857 \
3858 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3859 IEM_MC_REF_EFLAGS(pEFlags); \
3860 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3861 \
3862 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3863 IEM_MC_END(); \
3864 } \
3865 else \
3866 { \
3867 /* memory target */ \
3868 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
3869 { \
3870 IEM_MC_BEGIN(3, 3, 0); \
3871 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3872 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3873 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3874 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
3875 \
3876 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
3877 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3878 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3879 IEMOP_HLP_DONE_DECODING(); \
3880 \
3881 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
3882 IEM_MC_FETCH_EFLAGS(EFlags); \
3883 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3884 \
3885 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
3886 IEM_MC_COMMIT_EFLAGS(EFlags); \
3887 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3888 IEM_MC_END(); \
3889 } \
3890 else \
3891 { \
3892 (void)0
3893
/**
 * Tail for IEMOP_BODY_BINARY_Eb_Ib_RW: emits the LOCK-prefixed memory
 * variant using the locked assembly worker and closes the braces left open
 * by the _RW macro.
 *
 * @param a_fnLockedU8  The locked 8-bit assembly worker
 *                      (pu8Dst, u8Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
3917
/**
 * Read-only variant of IEMOP_BODY_BINARY_Eb_Ib_RW for CMP: the memory
 * operand is mapped read-only and never written back, only EFLAGS are
 * committed.  Like _RW, this ends inside an open "else {" branch for the
 * LOCK-prefixed case and must be followed by IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK.
 *
 * @param a_fnNormalU8  The 8-bit assembly worker (pu8Dst, u8Src, pEFlags);
 *                      must not modify *pu8Dst.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
3964
/**
 * Tail for IEMOP_BODY_BINARY_Eb_Ib_RO (and _RW) when the LOCK prefix is not
 * allowed for the instruction (e.g. CMP): raises #UD and closes the braces
 * left open by the preceding body macro.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
3971
3972
3973
3974/**
3975 * @opmaps grp1_80,grp1_83
3976 * @opcode /0
3977 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
{
    /* ADD Eb,Ib: LOCK prefix allowed on the memory form. */
    IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_add_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
}
3984
3985
3986/**
3987 * @opmaps grp1_80,grp1_83
3988 * @opcode /1
3989 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
{
    /* OR Eb,Ib: LOCK prefix allowed on the memory form. */
    IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_or_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
}
3996
3997
3998/**
3999 * @opmaps grp1_80,grp1_83
4000 * @opcode /2
4001 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
{
    /* ADC Eb,Ib: LOCK prefix allowed on the memory form. */
    IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
}
4008
4009
4010/**
4011 * @opmaps grp1_80,grp1_83
4012 * @opcode /3
4013 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
{
    /* SBB Eb,Ib: LOCK prefix allowed on the memory form. */
    IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
}
4020
4021
4022/**
4023 * @opmaps grp1_80,grp1_83
4024 * @opcode /4
4025 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
{
    /* AND Eb,Ib: LOCK prefix allowed on the memory form. */
    IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_and_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
}
4032
4033
4034/**
4035 * @opmaps grp1_80,grp1_83
4036 * @opcode /5
4037 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
{
    /* SUB Eb,Ib: LOCK prefix allowed on the memory form. */
    IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
}
4044
4045
4046/**
4047 * @opmaps grp1_80,grp1_83
4048 * @opcode /6
4049 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
    /* The _RW/_LOCKED macro pair forms one if/else body: register and
       unlocked memory targets, then the LOCK-prefixed memory target. */
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
}
4056
4057
4058/**
4059 * @opmaps grp1_80,grp1_83
4060 * @opcode /7
4061 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
    /* CMP only reads its destination, so the read-only body is used and
       the _NO_LOCK tail rejects any LOCK prefix. */
    IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
}
4068
4069
4070/**
4071 * @opcode 0x80
4072 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The ModR/M reg field selects the group 1 operation. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4089
4090
/**
 * Body for a group 1 binary operator, Ev,Iz encoding (opcode 0x81), writable
 * destination forms (add/or/adc/sbb/and/sub/xor).
 *
 * Covers the register target and the unlocked memory target.  The braces are
 * deliberately left unbalanced: IEMOP_BODY_BINARY_Ev_Iz_LOCKED must follow to
 * supply the LOCK-prefixed memory path and close the scopes (the pair is
 * split because of parsing restrictions in IEMAllInstPython.py).
 *
 * For 64-bit operand size the Iz immediate is fetched as a sign-extended
 * 32-bit value (IEM_OPCODE_GET_NEXT_S32_SX_U64).
 */
#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                /* 32-bit register writes zero the high dword of the GREG. */ \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    /* NOTE(review): DONE_DECODING precedes the assign here, the 16/32-bit
                       cases do it the other way around -- appears benign. */ \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* This must be a separate macro due to parsing restrictions in IEMAllInstPython.py.
   It supplies the LOCK-prefixed memory-target path (using the atomic a_fnLocked*
   workers) and closes the scopes opened by IEMOP_BODY_BINARY_Ev_Iz_RW above. */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4324
/* Read-only version of the Ev,Iz body, used by cmp (the destination is only
   read, so memory is mapped RO, the 32-bit register path needs no high-dword
   clearing, and a LOCK prefix is rejected in the else branch below --
   this macro is self-contained/balanced, no _LOCKED companion follows). */
#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* LOCK prefixed cmp is invalid. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4478
4479
4480/**
4481 * @opmaps grp1_81
4482 * @opcode /0
4483 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
    /* _RW handles register + unlocked memory, _LOCKED the LOCK-prefixed
       memory target; the macro pair forms a single if/else body. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
4490
4491
4492/**
4493 * @opmaps grp1_81
4494 * @opcode /1
4495 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
    /* _RW handles register + unlocked memory, _LOCKED the LOCK-prefixed
       memory target; the macro pair forms a single if/else body. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
4502
4503
4504/**
4505 * @opmaps grp1_81
4506 * @opcode /2
4507 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
    /* _RW handles register + unlocked memory, _LOCKED the LOCK-prefixed
       memory target; the macro pair forms a single if/else body. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
4514
4515
4516/**
4517 * @opmaps grp1_81
4518 * @opcode /3
4519 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
    /* _RW handles register + unlocked memory, _LOCKED the LOCK-prefixed
       memory target; the macro pair forms a single if/else body. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
4526
4527
4528/**
4529 * @opmaps grp1_81
4530 * @opcode /4
4531 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
    /* _RW handles register + unlocked memory, _LOCKED the LOCK-prefixed
       memory target; the macro pair forms a single if/else body. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
4538
4539
4540/**
4541 * @opmaps grp1_81
4542 * @opcode /5
4543 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
    /* _RW handles register + unlocked memory, _LOCKED the LOCK-prefixed
       memory target; the macro pair forms a single if/else body. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
4550
4551
4552/**
4553 * @opmaps grp1_81
4554 * @opcode /6
4555 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
    /* _RW handles register + unlocked memory, _LOCKED the LOCK-prefixed
       memory target; the macro pair forms a single if/else body. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
4562
4563
4564/**
4565 * @opmaps grp1_81
4566 * @opcode /7
4567 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
    /* CMP only reads its operands; the RO body also rejects the LOCK prefix. */
    IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
4573
4574
4575/**
4576 * @opcode 0x81
4577 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The ModR/M reg field selects the group 1 operation. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4594
4595
4596/**
4597 * @opcode 0x82
4598 * @opmnemonic grp1_82
4599 * @opgroup og_groups
4600 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    /* Opcode 0x82 aliases 0x80 but is not valid in 64-bit mode. */
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
4606
4607
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Ev_Ib (opcode 0x83); writable destination forms.
 *
 * The Ib immediate is sign-extended to the effective operand size (the
 * (int8_t) casts below).  Covers the register target and the unlocked
 * memory target; braces are deliberately left unbalanced so that
 * IEMOP_BODY_BINARY_Ev_Ib_LOCKED can supply the LOCK-prefixed memory path
 * and close the scopes (split for IEMAllInstPython.py).
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                /* 32-bit register writes zero the high dword of the GREG. */ \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py.
   Supplies the LOCK-prefixed memory-target path (atomic a_fnLocked* workers)
   and closes the scopes opened by IEMOP_BODY_BINARY_Ev_Ib_RW above. */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4826
/* Read-only variant of the Ev,Ib body, used by cmp: the destination is only
   read (RO mapping, no high-dword clearing on the 32-bit register path) and a
   LOCK prefix is rejected.  Self-contained/balanced, no _LOCKED companion. */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* LOCK prefixed cmp is invalid. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4970
4971/**
4972 * @opmaps grp1_83
4973 * @opcode /0
4974 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
    /* Sign-extended byte immediate; _RW handles register + unlocked memory,
       _LOCKED the LOCK-prefixed memory target (pair forms one if/else). */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
4981
4982
4983/**
4984 * @opmaps grp1_83
4985 * @opcode /1
4986 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
    /* Sign-extended byte immediate; _RW handles register + unlocked memory,
       _LOCKED the LOCK-prefixed memory target (pair forms one if/else). */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
4993
4994
4995/**
4996 * @opmaps grp1_83
4997 * @opcode /2
4998 */
4999FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
5000{
5001 IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
5002 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
5003 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
5004}
5005
5006
5007/**
5008 * @opmaps grp1_83
5009 * @opcode /3
5010 */
5011FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
5012{
5013 IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
5014 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
5015 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
5016}
5017
5018
5019/**
5020 * @opmaps grp1_83
5021 * @opcode /4
5022 */
5023FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
5024{
5025 IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
5026 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
5027 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
5028}
5029
5030
5031/**
5032 * @opmaps grp1_83
5033 * @opcode /5
5034 */
5035FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
5036{
5037 IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
5038 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
5039 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
5040}
5041
5042
5043/**
5044 * @opmaps grp1_83
5045 * @opcode /6
5046 */
5047FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
5048{
5049 IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
5050 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
5051 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
5052}
5053
5054
5055/**
5056 * @opmaps grp1_83
5057 * @opcode /7
5058 */
5059FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
5060{
5061 IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
5062 IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
5063}
5064
5065
5066/**
5067 * @opcode 0x83
5068 */
5069FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
5070{
5071 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
5072 to the 386 even if absent in the intel reference manuals and some
5073 3rd party opcode listings. */
5074 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5075 switch (IEM_GET_MODRM_REG_8(bRm))
5076 {
5077 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
5078 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib, bRm);
5079 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
5080 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
5081 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
5082 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
5083 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
5084 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
5085 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5086 }
5087}
5088
5089
5090/**
5091 * @opcode 0x84
5092 */
5093FNIEMOP_DEF(iemOp_test_Eb_Gb)
5094{
5095 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
5096 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5097 IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8);
5098 IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
5099}
5100
5101
5102/**
5103 * @opcode 0x85
5104 */
5105FNIEMOP_DEF(iemOp_test_Ev_Gv)
5106{
5107 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
5108 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5109 IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64);
5110}
5111
5112
5113/**
5114 * @opcode 0x86
5115 */
5116FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
5117{
5118 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5119 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
5120
5121 /*
5122 * If rm is denoting a register, no more instruction bytes.
5123 */
5124 if (IEM_IS_MODRM_REG_MODE(bRm))
5125 {
5126 IEM_MC_BEGIN(0, 2, 0);
5127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5128 IEM_MC_LOCAL(uint8_t, uTmp1);
5129 IEM_MC_LOCAL(uint8_t, uTmp2);
5130
5131 IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5132 IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5133 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5134 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5135
5136 IEM_MC_ADVANCE_RIP_AND_FINISH();
5137 IEM_MC_END();
5138 }
5139 else
5140 {
5141 /*
5142 * We're accessing memory.
5143 */
5144 IEM_MC_BEGIN(2, 4, 0);
5145 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5146 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
5147 IEM_MC_LOCAL(uint8_t, uTmpReg);
5148 IEM_MC_ARG(uint8_t *, pu8Mem, 0);
5149 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1);
5150
5151 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5153 IEM_MC_MEM_MAP_U8_RW(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5154 IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
5155 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5156 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
5157 else
5158 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
5159 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Mem, bUnmapInfo);
5160 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);
5161
5162 IEM_MC_ADVANCE_RIP_AND_FINISH();
5163 IEM_MC_END();
5164 }
5165}
5166
5167
5168/**
5169 * @opcode 0x87
5170 */
5171FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
5172{
5173 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
5174 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5175
5176 /*
5177 * If rm is denoting a register, no more instruction bytes.
5178 */
5179 if (IEM_IS_MODRM_REG_MODE(bRm))
5180 {
5181 switch (pVCpu->iem.s.enmEffOpSize)
5182 {
5183 case IEMMODE_16BIT:
5184 IEM_MC_BEGIN(0, 2, 0);
5185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5186 IEM_MC_LOCAL(uint16_t, uTmp1);
5187 IEM_MC_LOCAL(uint16_t, uTmp2);
5188
5189 IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5190 IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5191 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5192 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5193
5194 IEM_MC_ADVANCE_RIP_AND_FINISH();
5195 IEM_MC_END();
5196 break;
5197
5198 case IEMMODE_32BIT:
5199 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386);
5200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5201 IEM_MC_LOCAL(uint32_t, uTmp1);
5202 IEM_MC_LOCAL(uint32_t, uTmp2);
5203
5204 IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5205 IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5206 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5207 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5208
5209 IEM_MC_ADVANCE_RIP_AND_FINISH();
5210 IEM_MC_END();
5211 break;
5212
5213 case IEMMODE_64BIT:
5214 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
5215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5216 IEM_MC_LOCAL(uint64_t, uTmp1);
5217 IEM_MC_LOCAL(uint64_t, uTmp2);
5218
5219 IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5220 IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5221 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5222 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5223
5224 IEM_MC_ADVANCE_RIP_AND_FINISH();
5225 IEM_MC_END();
5226 break;
5227
5228 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5229 }
5230 }
5231 else
5232 {
5233 /*
5234 * We're accessing memory.
5235 */
5236 switch (pVCpu->iem.s.enmEffOpSize)
5237 {
5238 case IEMMODE_16BIT:
5239 IEM_MC_BEGIN(2, 4, 0);
5240 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5241 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
5242 IEM_MC_LOCAL(uint16_t, uTmpReg);
5243 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
5244 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1);
5245
5246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5248 IEM_MC_MEM_MAP_U16_RW(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5249 IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
5250 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5251 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
5252 else
5253 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
5254 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Mem, bUnmapInfo);
5255 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);
5256
5257 IEM_MC_ADVANCE_RIP_AND_FINISH();
5258 IEM_MC_END();
5259 break;
5260
5261 case IEMMODE_32BIT:
5262 IEM_MC_BEGIN(2, 4, IEM_MC_F_MIN_386);
5263 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5264 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
5265 IEM_MC_LOCAL(uint32_t, uTmpReg);
5266 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
5267 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1);
5268
5269 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5271 IEM_MC_MEM_MAP_U32_RW(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5272 IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
5273 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5274 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
5275 else
5276 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
5277 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Mem, bUnmapInfo);
5278 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);
5279
5280 IEM_MC_ADVANCE_RIP_AND_FINISH();
5281 IEM_MC_END();
5282 break;
5283
5284 case IEMMODE_64BIT:
5285 IEM_MC_BEGIN(2, 4, IEM_MC_F_64BIT);
5286 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5287 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
5288 IEM_MC_LOCAL(uint64_t, uTmpReg);
5289 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
5290 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1);
5291
5292 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5294 IEM_MC_MEM_MAP_U64_RW(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5295 IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
5296 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5297 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
5298 else
5299 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
5300 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Mem, bUnmapInfo);
5301 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);
5302
5303 IEM_MC_ADVANCE_RIP_AND_FINISH();
5304 IEM_MC_END();
5305 break;
5306
5307 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5308 }
5309 }
5310}
5311
5312
5313/**
5314 * @opcode 0x88
5315 */
5316FNIEMOP_DEF(iemOp_mov_Eb_Gb)
5317{
5318 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
5319
5320 uint8_t bRm;
5321 IEM_OPCODE_GET_NEXT_U8(&bRm);
5322
5323 /*
5324 * If rm is denoting a register, no more instruction bytes.
5325 */
5326 if (IEM_IS_MODRM_REG_MODE(bRm))
5327 {
5328 IEM_MC_BEGIN(0, 1, 0);
5329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5330 IEM_MC_LOCAL(uint8_t, u8Value);
5331 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5332 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
5333 IEM_MC_ADVANCE_RIP_AND_FINISH();
5334 IEM_MC_END();
5335 }
5336 else
5337 {
5338 /*
5339 * We're writing a register to memory.
5340 */
5341 IEM_MC_BEGIN(0, 2, 0);
5342 IEM_MC_LOCAL(uint8_t, u8Value);
5343 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5346 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5347 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
5348 IEM_MC_ADVANCE_RIP_AND_FINISH();
5349 IEM_MC_END();
5350 }
5351}
5352
5353
5354/**
5355 * @opcode 0x89
5356 */
5357FNIEMOP_DEF(iemOp_mov_Ev_Gv)
5358{
5359 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
5360
5361 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5362
5363 /*
5364 * If rm is denoting a register, no more instruction bytes.
5365 */
5366 if (IEM_IS_MODRM_REG_MODE(bRm))
5367 {
5368 switch (pVCpu->iem.s.enmEffOpSize)
5369 {
5370 case IEMMODE_16BIT:
5371 IEM_MC_BEGIN(0, 1, 0);
5372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5373 IEM_MC_LOCAL(uint16_t, u16Value);
5374 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5375 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
5376 IEM_MC_ADVANCE_RIP_AND_FINISH();
5377 IEM_MC_END();
5378 break;
5379
5380 case IEMMODE_32BIT:
5381 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
5382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5383 IEM_MC_LOCAL(uint32_t, u32Value);
5384 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5385 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
5386 IEM_MC_ADVANCE_RIP_AND_FINISH();
5387 IEM_MC_END();
5388 break;
5389
5390 case IEMMODE_64BIT:
5391 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
5392 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5393 IEM_MC_LOCAL(uint64_t, u64Value);
5394 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5395 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
5396 IEM_MC_ADVANCE_RIP_AND_FINISH();
5397 IEM_MC_END();
5398 break;
5399
5400 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5401 }
5402 }
5403 else
5404 {
5405 /*
5406 * We're writing a register to memory.
5407 */
5408 switch (pVCpu->iem.s.enmEffOpSize)
5409 {
5410 case IEMMODE_16BIT:
5411 IEM_MC_BEGIN(0, 2, 0);
5412 IEM_MC_LOCAL(uint16_t, u16Value);
5413 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5414 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5416 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5417 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
5418 IEM_MC_ADVANCE_RIP_AND_FINISH();
5419 IEM_MC_END();
5420 break;
5421
5422 case IEMMODE_32BIT:
5423 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386);
5424 IEM_MC_LOCAL(uint32_t, u32Value);
5425 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5426 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5427 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5428 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5429 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
5430 IEM_MC_ADVANCE_RIP_AND_FINISH();
5431 IEM_MC_END();
5432 break;
5433
5434 case IEMMODE_64BIT:
5435 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
5436 IEM_MC_LOCAL(uint64_t, u64Value);
5437 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5438 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5440 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5441 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
5442 IEM_MC_ADVANCE_RIP_AND_FINISH();
5443 IEM_MC_END();
5444 break;
5445
5446 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5447 }
5448 }
5449}
5450
5451
5452/**
5453 * @opcode 0x8a
5454 */
5455FNIEMOP_DEF(iemOp_mov_Gb_Eb)
5456{
5457 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
5458
5459 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5460
5461 /*
5462 * If rm is denoting a register, no more instruction bytes.
5463 */
5464 if (IEM_IS_MODRM_REG_MODE(bRm))
5465 {
5466 IEM_MC_BEGIN(0, 1, 0);
5467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5468 IEM_MC_LOCAL(uint8_t, u8Value);
5469 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5470 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
5471 IEM_MC_ADVANCE_RIP_AND_FINISH();
5472 IEM_MC_END();
5473 }
5474 else
5475 {
5476 /*
5477 * We're loading a register from memory.
5478 */
5479 IEM_MC_BEGIN(0, 2, 0);
5480 IEM_MC_LOCAL(uint8_t, u8Value);
5481 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5482 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5484 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5485 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
5486 IEM_MC_ADVANCE_RIP_AND_FINISH();
5487 IEM_MC_END();
5488 }
5489}
5490
5491
5492/**
5493 * @opcode 0x8b
5494 */
5495FNIEMOP_DEF(iemOp_mov_Gv_Ev)
5496{
5497 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
5498
5499 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5500
5501 /*
5502 * If rm is denoting a register, no more instruction bytes.
5503 */
5504 if (IEM_IS_MODRM_REG_MODE(bRm))
5505 {
5506 switch (pVCpu->iem.s.enmEffOpSize)
5507 {
5508 case IEMMODE_16BIT:
5509 IEM_MC_BEGIN(0, 1, 0);
5510 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5511 IEM_MC_LOCAL(uint16_t, u16Value);
5512 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5513 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
5514 IEM_MC_ADVANCE_RIP_AND_FINISH();
5515 IEM_MC_END();
5516 break;
5517
5518 case IEMMODE_32BIT:
5519 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
5520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5521 IEM_MC_LOCAL(uint32_t, u32Value);
5522 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5523 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
5524 IEM_MC_ADVANCE_RIP_AND_FINISH();
5525 IEM_MC_END();
5526 break;
5527
5528 case IEMMODE_64BIT:
5529 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
5530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5531 IEM_MC_LOCAL(uint64_t, u64Value);
5532 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5533 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
5534 IEM_MC_ADVANCE_RIP_AND_FINISH();
5535 IEM_MC_END();
5536 break;
5537
5538 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5539 }
5540 }
5541 else
5542 {
5543 /*
5544 * We're loading a register from memory.
5545 */
5546 switch (pVCpu->iem.s.enmEffOpSize)
5547 {
5548 case IEMMODE_16BIT:
5549 IEM_MC_BEGIN(0, 2, 0);
5550 IEM_MC_LOCAL(uint16_t, u16Value);
5551 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5552 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5553 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5554 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5555 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
5556 IEM_MC_ADVANCE_RIP_AND_FINISH();
5557 IEM_MC_END();
5558 break;
5559
5560 case IEMMODE_32BIT:
5561 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386);
5562 IEM_MC_LOCAL(uint32_t, u32Value);
5563 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5564 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5566 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5567 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
5568 IEM_MC_ADVANCE_RIP_AND_FINISH();
5569 IEM_MC_END();
5570 break;
5571
5572 case IEMMODE_64BIT:
5573 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
5574 IEM_MC_LOCAL(uint64_t, u64Value);
5575 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5576 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5578 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5579 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
5580 IEM_MC_ADVANCE_RIP_AND_FINISH();
5581 IEM_MC_END();
5582 break;
5583
5584 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5585 }
5586 }
5587}
5588
5589
5590/**
5591 * opcode 0x63
5592 * @todo Table fixme
5593 */
5594FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
5595{
5596 if (!IEM_IS_64BIT_CODE(pVCpu))
5597 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
5598 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
5599 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
5600 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
5601}
5602
5603
5604/**
5605 * @opcode 0x8c
5606 */
5607FNIEMOP_DEF(iemOp_mov_Ev_Sw)
5608{
5609 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
5610
5611 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5612
5613 /*
5614 * Check that the destination register exists. The REX.R prefix is ignored.
5615 */
5616 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5617 if (iSegReg > X86_SREG_GS)
5618 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5619
5620 /*
5621 * If rm is denoting a register, no more instruction bytes.
5622 * In that case, the operand size is respected and the upper bits are
5623 * cleared (starting with some pentium).
5624 */
5625 if (IEM_IS_MODRM_REG_MODE(bRm))
5626 {
5627 switch (pVCpu->iem.s.enmEffOpSize)
5628 {
5629 case IEMMODE_16BIT:
5630 IEM_MC_BEGIN(0, 1, 0);
5631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5632 IEM_MC_LOCAL(uint16_t, u16Value);
5633 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5634 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
5635 IEM_MC_ADVANCE_RIP_AND_FINISH();
5636 IEM_MC_END();
5637 break;
5638
5639 case IEMMODE_32BIT:
5640 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
5641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5642 IEM_MC_LOCAL(uint32_t, u32Value);
5643 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
5644 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
5645 IEM_MC_ADVANCE_RIP_AND_FINISH();
5646 IEM_MC_END();
5647 break;
5648
5649 case IEMMODE_64BIT:
5650 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
5651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5652 IEM_MC_LOCAL(uint64_t, u64Value);
5653 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
5654 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
5655 IEM_MC_ADVANCE_RIP_AND_FINISH();
5656 IEM_MC_END();
5657 break;
5658
5659 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5660 }
5661 }
5662 else
5663 {
5664 /*
5665 * We're saving the register to memory. The access is word sized
5666 * regardless of operand size prefixes.
5667 */
5668#if 0 /* not necessary */
5669 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5670#endif
5671 IEM_MC_BEGIN(0, 2, 0);
5672 IEM_MC_LOCAL(uint16_t, u16Value);
5673 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5674 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5676 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5677 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
5678 IEM_MC_ADVANCE_RIP_AND_FINISH();
5679 IEM_MC_END();
5680 }
5681}
5682
5683
5684
5685
5686/**
5687 * @opcode 0x8d
5688 */
5689FNIEMOP_DEF(iemOp_lea_Gv_M)
5690{
5691 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
5692 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5693 if (IEM_IS_MODRM_REG_MODE(bRm))
5694 IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */
5695
5696 switch (pVCpu->iem.s.enmEffOpSize)
5697 {
5698 case IEMMODE_16BIT:
5699 IEM_MC_BEGIN(0, 2, 0);
5700 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5701 IEM_MC_LOCAL(uint16_t, u16Cast);
5702 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5704 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
5705 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
5706 IEM_MC_ADVANCE_RIP_AND_FINISH();
5707 IEM_MC_END();
5708 break;
5709
5710 case IEMMODE_32BIT:
5711 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386);
5712 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5713 IEM_MC_LOCAL(uint32_t, u32Cast);
5714 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5716 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
5717 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
5718 IEM_MC_ADVANCE_RIP_AND_FINISH();
5719 IEM_MC_END();
5720 break;
5721
5722 case IEMMODE_64BIT:
5723 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
5724 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5725 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5727 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
5728 IEM_MC_ADVANCE_RIP_AND_FINISH();
5729 IEM_MC_END();
5730 break;
5731
5732 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5733 }
5734}
5735
5736
5737/**
5738 * @opcode 0x8e
5739 */
5740FNIEMOP_DEF(iemOp_mov_Sw_Ev)
5741{
5742 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
5743
5744 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5745
5746 /*
5747 * The practical operand size is 16-bit.
5748 */
5749#if 0 /* not necessary */
5750 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5751#endif
5752
5753 /*
5754 * Check that the destination register exists and can be used with this
5755 * instruction. The REX.R prefix is ignored.
5756 */
5757 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5758 /** @todo r=bird: What does 8086 do here wrt CS? */
5759 if ( iSegReg == X86_SREG_CS
5760 || iSegReg > X86_SREG_GS)
5761 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5762
5763 /*
5764 * If rm is denoting a register, no more instruction bytes.
5765 */
5766 if (IEM_IS_MODRM_REG_MODE(bRm))
5767 {
5768 IEM_MC_BEGIN(2, 0, 0);
5769 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5770 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
5771 IEM_MC_ARG(uint16_t, u16Value, 1);
5772 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5773 if (iSRegArg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
5774 IEM_MC_CALL_CIMPL_2( 0, iemCImpl_load_SReg, iSRegArg, u16Value);
5775 else
5776 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE, iemCImpl_load_SReg, iSRegArg, u16Value);
5777 IEM_MC_END();
5778 }
5779 else
5780 {
5781 /*
5782 * We're loading the register from memory. The access is word sized
5783 * regardless of operand size prefixes.
5784 */
5785 IEM_MC_BEGIN(2, 1, 0);
5786 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
5787 IEM_MC_ARG(uint16_t, u16Value, 1);
5788 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5789 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5791 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5792 if (iSRegArg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
5793 IEM_MC_CALL_CIMPL_2( 0, iemCImpl_load_SReg, iSRegArg, u16Value);
5794 else
5795 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE, iemCImpl_load_SReg, iSRegArg, u16Value);
5796 IEM_MC_END();
5797 }
5798}
5799
5800
/** Opcode 0x8f /0.
 *
 * POP Ev: pop from the stack into a register or a memory operand.  The
 * memory form is tricky because Intel specifies that rSP is incremented
 * before it participates in the effective address calculation — see the
 * comments below.  Defaults to 64-bit operand size in long mode. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    /* NOTE(review): the (2/4/8 << 8) argument presumably tells the EA
       calculation to bias rSP by the pop size, per the Intel note above —
       confirm against IEM_MC_CALC_RM_EFF_ADDR's parameter docs. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR     GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
5935
5936
5937/**
5938 * @opcode 0x8f
5939 */
5940FNIEMOP_DEF(iemOp_Grp1A__xop)
5941{
5942 /*
5943 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
5944 * three byte VEX prefix, except that the mmmmm field cannot have the values
5945 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
5946 */
5947 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5948 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
5949 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
5950
5951 IEMOP_MNEMONIC(xop, "xop");
5952 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
5953 {
5954 /** @todo Test when exctly the XOP conformance checks kick in during
5955 * instruction decoding and fetching (using \#PF). */
5956 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
5957 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
5958 if ( ( pVCpu->iem.s.fPrefixes
5959 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
5960 == 0)
5961 {
5962 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
5963 if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
5964 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
5965 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
5966 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
5967 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
5968 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
5969 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
5970 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
5971
5972 /** @todo XOP: Just use new tables and decoders. */
5973 switch (bRm & 0x1f)
5974 {
5975 case 8: /* xop opcode map 8. */
5976 IEMOP_BITCH_ABOUT_STUB();
5977 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5978
5979 case 9: /* xop opcode map 9. */
5980 IEMOP_BITCH_ABOUT_STUB();
5981 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5982
5983 case 10: /* xop opcode map 10. */
5984 IEMOP_BITCH_ABOUT_STUB();
5985 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5986
5987 default:
5988 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
5989 IEMOP_RAISE_INVALID_OPCODE_RET();
5990 }
5991 }
5992 else
5993 Log(("XOP: Invalid prefix mix!\n"));
5994 }
5995 else
5996 Log(("XOP: XOP support disabled!\n"));
5997 IEMOP_RAISE_INVALID_OPCODE_RET();
5998}
5999
6000
6001/**
6002 * Common 'xchg reg,rAX' helper.
6003 */
6004FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
6005{
6006 iReg |= pVCpu->iem.s.uRexB;
6007 switch (pVCpu->iem.s.enmEffOpSize)
6008 {
6009 case IEMMODE_16BIT:
6010 IEM_MC_BEGIN(0, 2, 0);
6011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6012 IEM_MC_LOCAL(uint16_t, u16Tmp1);
6013 IEM_MC_LOCAL(uint16_t, u16Tmp2);
6014 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
6015 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
6016 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
6017 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
6018 IEM_MC_ADVANCE_RIP_AND_FINISH();
6019 IEM_MC_END();
6020 break;
6021
6022 case IEMMODE_32BIT:
6023 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386);
6024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6025 IEM_MC_LOCAL(uint32_t, u32Tmp1);
6026 IEM_MC_LOCAL(uint32_t, u32Tmp2);
6027 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
6028 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
6029 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
6030 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
6031 IEM_MC_ADVANCE_RIP_AND_FINISH();
6032 IEM_MC_END();
6033 break;
6034
6035 case IEMMODE_64BIT:
6036 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
6037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6038 IEM_MC_LOCAL(uint64_t, u64Tmp1);
6039 IEM_MC_LOCAL(uint64_t, u64Tmp2);
6040 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
6041 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
6042 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
6043 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
6044 IEM_MC_ADVANCE_RIP_AND_FINISH();
6045 IEM_MC_END();
6046 break;
6047
6048 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6049 }
6050}
6051
6052
6053/**
6054 * @opcode 0x90
6055 */
6056FNIEMOP_DEF(iemOp_nop)
6057{
6058 /* R8/R8D and RAX/EAX can be exchanged. */
6059 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
6060 {
6061 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
6062 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
6063 }
6064
6065 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
6066 {
6067 IEMOP_MNEMONIC(pause, "pause");
6068 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
6069 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
6070 if (!IEM_IS_IN_GUEST(pVCpu))
6071 { /* probable */ }
6072#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6073 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
6074 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmx_pause);
6075#endif
6076#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
6077 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
6078 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_svm_pause);
6079#endif
6080 }
6081 else
6082 IEMOP_MNEMONIC(nop, "nop");
6083 /** @todo testcase: lock nop; lock pause */
6084 IEM_MC_BEGIN(0, 0, 0);
6085 IEMOP_HLP_DONE_DECODING();
6086 IEM_MC_ADVANCE_RIP_AND_FINISH();
6087 IEM_MC_END();
6088}
6089
6090
6091/**
6092 * @opcode 0x91
6093 */
6094FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
6095{
6096 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
6097 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
6098}
6099
6100
6101/**
6102 * @opcode 0x92
6103 */
6104FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
6105{
6106 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
6107 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
6108}
6109
6110
6111/**
6112 * @opcode 0x93
6113 */
6114FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
6115{
6116 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
6117 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
6118}
6119
6120
6121/**
6122 * @opcode 0x94
6123 */
6124FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6125{
6126 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6127 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6128}
6129
6130
6131/**
6132 * @opcode 0x95
6133 */
6134FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
6135{
6136 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
6137 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
6138}
6139
6140
6141/**
6142 * @opcode 0x96
6143 */
6144FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
6145{
6146 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
6147 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
6148}
6149
6150
6151/**
6152 * @opcode 0x97
6153 */
6154FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
6155{
6156 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
6157 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
6158}
6159
6160
6161/**
6162 * @opcode 0x98
6163 */
6164FNIEMOP_DEF(iemOp_cbw)
6165{
6166 switch (pVCpu->iem.s.enmEffOpSize)
6167 {
6168 case IEMMODE_16BIT:
6169 IEMOP_MNEMONIC(cbw, "cbw");
6170 IEM_MC_BEGIN(0, 1, 0);
6171 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6172 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
6173 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
6174 } IEM_MC_ELSE() {
6175 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
6176 } IEM_MC_ENDIF();
6177 IEM_MC_ADVANCE_RIP_AND_FINISH();
6178 IEM_MC_END();
6179 break;
6180
6181 case IEMMODE_32BIT:
6182 IEMOP_MNEMONIC(cwde, "cwde");
6183 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
6184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6185 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6186 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
6187 } IEM_MC_ELSE() {
6188 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
6189 } IEM_MC_ENDIF();
6190 IEM_MC_ADVANCE_RIP_AND_FINISH();
6191 IEM_MC_END();
6192 break;
6193
6194 case IEMMODE_64BIT:
6195 IEMOP_MNEMONIC(cdqe, "cdqe");
6196 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
6197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6198 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6199 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
6200 } IEM_MC_ELSE() {
6201 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
6202 } IEM_MC_ENDIF();
6203 IEM_MC_ADVANCE_RIP_AND_FINISH();
6204 IEM_MC_END();
6205 break;
6206
6207 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6208 }
6209}
6210
6211
6212/**
6213 * @opcode 0x99
6214 */
6215FNIEMOP_DEF(iemOp_cwd)
6216{
6217 switch (pVCpu->iem.s.enmEffOpSize)
6218 {
6219 case IEMMODE_16BIT:
6220 IEMOP_MNEMONIC(cwd, "cwd");
6221 IEM_MC_BEGIN(0, 1, 0);
6222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6223 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6224 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
6225 } IEM_MC_ELSE() {
6226 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
6227 } IEM_MC_ENDIF();
6228 IEM_MC_ADVANCE_RIP_AND_FINISH();
6229 IEM_MC_END();
6230 break;
6231
6232 case IEMMODE_32BIT:
6233 IEMOP_MNEMONIC(cdq, "cdq");
6234 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
6235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6236 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6237 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
6238 } IEM_MC_ELSE() {
6239 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
6240 } IEM_MC_ENDIF();
6241 IEM_MC_ADVANCE_RIP_AND_FINISH();
6242 IEM_MC_END();
6243 break;
6244
6245 case IEMMODE_64BIT:
6246 IEMOP_MNEMONIC(cqo, "cqo");
6247 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
6248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6249 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
6250 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
6251 } IEM_MC_ELSE() {
6252 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
6253 } IEM_MC_ENDIF();
6254 IEM_MC_ADVANCE_RIP_AND_FINISH();
6255 IEM_MC_END();
6256 break;
6257
6258 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6259 }
6260}
6261
6262
6263/**
6264 * @opcode 0x9a
6265 */
6266FNIEMOP_DEF(iemOp_call_Ap)
6267{
6268 IEMOP_MNEMONIC(call_Ap, "call Ap");
6269 IEMOP_HLP_NO_64BIT();
6270
6271 /* Decode the far pointer address and pass it on to the far call C implementation. */
6272 uint32_t off32Seg;
6273 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
6274 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
6275 else
6276 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
6277 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
6278 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6279 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
6280 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
6281 iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
6282}
6283
6284
/** Opcode 0x9b. (aka fwait)
 * No-op apart from checking for pending FPU exceptions / device-not-available. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6296
6297
6298/**
6299 * @opcode 0x9c
6300 */
6301FNIEMOP_DEF(iemOp_pushf_Fv)
6302{
6303 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
6304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6305 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6306 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
6307}
6308
6309
6310/**
6311 * @opcode 0x9d
6312 */
6313FNIEMOP_DEF(iemOp_popf_Fv)
6314{
6315 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
6316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6317 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6318 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
6319 iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
6320}
6321
6322
6323/**
6324 * @opcode 0x9e
6325 */
6326FNIEMOP_DEF(iemOp_sahf)
6327{
6328 IEMOP_MNEMONIC(sahf, "sahf");
6329 if ( IEM_IS_64BIT_CODE(pVCpu)
6330 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6331 IEMOP_RAISE_INVALID_OPCODE_RET();
6332 IEM_MC_BEGIN(0, 2, 0);
6333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6334 IEM_MC_LOCAL(uint32_t, u32Flags);
6335 IEM_MC_LOCAL(uint32_t, EFlags);
6336 IEM_MC_FETCH_EFLAGS(EFlags);
6337 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
6338 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6339 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
6340 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
6341 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
6342 IEM_MC_COMMIT_EFLAGS(EFlags);
6343 IEM_MC_ADVANCE_RIP_AND_FINISH();
6344 IEM_MC_END();
6345}
6346
6347
6348/**
6349 * @opcode 0x9f
6350 */
6351FNIEMOP_DEF(iemOp_lahf)
6352{
6353 IEMOP_MNEMONIC(lahf, "lahf");
6354 if ( IEM_IS_64BIT_CODE(pVCpu)
6355 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6356 IEMOP_RAISE_INVALID_OPCODE_RET();
6357 IEM_MC_BEGIN(0, 1, 0);
6358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6359 IEM_MC_LOCAL(uint8_t, u8Flags);
6360 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
6361 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
6362 IEM_MC_ADVANCE_RIP_AND_FINISH();
6363 IEM_MC_END();
6364}
6365
6366
6367/**
6368 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
6369 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
6370 * Will return/throw on failures.
6371 * @param a_GCPtrMemOff The variable to store the offset in.
6372 */
6373#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
6374 do \
6375 { \
6376 switch (pVCpu->iem.s.enmEffAddrMode) \
6377 { \
6378 case IEMMODE_16BIT: \
6379 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
6380 break; \
6381 case IEMMODE_32BIT: \
6382 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
6383 break; \
6384 case IEMMODE_64BIT: \
6385 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
6386 break; \
6387 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
6388 } \
6389 } while (0)
6390
6391/**
6392 * @opcode 0xa0
6393 */
6394FNIEMOP_DEF(iemOp_mov_AL_Ob)
6395{
6396 /*
6397 * Get the offset.
6398 */
6399 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
6400 RTGCPTR GCPtrMemOff;
6401 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6402
6403 /*
6404 * Fetch AL.
6405 */
6406 IEM_MC_BEGIN(0, 1, 0);
6407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6408 IEM_MC_LOCAL(uint8_t, u8Tmp);
6409 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6410 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
6411 IEM_MC_ADVANCE_RIP_AND_FINISH();
6412 IEM_MC_END();
6413}
6414
6415
6416/**
6417 * @opcode 0xa1
6418 */
6419FNIEMOP_DEF(iemOp_mov_rAX_Ov)
6420{
6421 /*
6422 * Get the offset.
6423 */
6424 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
6425 RTGCPTR GCPtrMemOff;
6426 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6427
6428 /*
6429 * Fetch rAX.
6430 */
6431 switch (pVCpu->iem.s.enmEffOpSize)
6432 {
6433 case IEMMODE_16BIT:
6434 IEM_MC_BEGIN(0, 1, 0);
6435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6436 IEM_MC_LOCAL(uint16_t, u16Tmp);
6437 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6438 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
6439 IEM_MC_ADVANCE_RIP_AND_FINISH();
6440 IEM_MC_END();
6441 break;
6442
6443 case IEMMODE_32BIT:
6444 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
6445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6446 IEM_MC_LOCAL(uint32_t, u32Tmp);
6447 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6448 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
6449 IEM_MC_ADVANCE_RIP_AND_FINISH();
6450 IEM_MC_END();
6451 break;
6452
6453 case IEMMODE_64BIT:
6454 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
6455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6456 IEM_MC_LOCAL(uint64_t, u64Tmp);
6457 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6458 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
6459 IEM_MC_ADVANCE_RIP_AND_FINISH();
6460 IEM_MC_END();
6461 break;
6462
6463 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6464 }
6465}
6466
6467
6468/**
6469 * @opcode 0xa2
6470 */
6471FNIEMOP_DEF(iemOp_mov_Ob_AL)
6472{
6473 /*
6474 * Get the offset.
6475 */
6476 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
6477 RTGCPTR GCPtrMemOff;
6478 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6479
6480 /*
6481 * Store AL.
6482 */
6483 IEM_MC_BEGIN(0, 1, 0);
6484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6485 IEM_MC_LOCAL(uint8_t, u8Tmp);
6486 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
6487 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
6488 IEM_MC_ADVANCE_RIP_AND_FINISH();
6489 IEM_MC_END();
6490}
6491
6492
6493/**
6494 * @opcode 0xa3
6495 */
6496FNIEMOP_DEF(iemOp_mov_Ov_rAX)
6497{
6498 /*
6499 * Get the offset.
6500 */
6501 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
6502 RTGCPTR GCPtrMemOff;
6503 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6504
6505 /*
6506 * Store rAX.
6507 */
6508 switch (pVCpu->iem.s.enmEffOpSize)
6509 {
6510 case IEMMODE_16BIT:
6511 IEM_MC_BEGIN(0, 1, 0);
6512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6513 IEM_MC_LOCAL(uint16_t, u16Tmp);
6514 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
6515 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
6516 IEM_MC_ADVANCE_RIP_AND_FINISH();
6517 IEM_MC_END();
6518 break;
6519
6520 case IEMMODE_32BIT:
6521 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
6522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6523 IEM_MC_LOCAL(uint32_t, u32Tmp);
6524 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
6525 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
6526 IEM_MC_ADVANCE_RIP_AND_FINISH();
6527 IEM_MC_END();
6528 break;
6529
6530 case IEMMODE_64BIT:
6531 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
6532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6533 IEM_MC_LOCAL(uint64_t, u64Tmp);
6534 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
6535 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
6536 IEM_MC_ADVANCE_RIP_AND_FINISH();
6537 IEM_MC_END();
6538 break;
6539
6540 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6541 }
6542}
6543
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Loads one element from [iEffSeg:xSI], stores it to ES:xDI, then advances
 * (DF clear) or retreats (DF set) both index registers by the element size. */
#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,  uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6563
6564/**
6565 * @opcode 0xa4
6566 */
6567FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
6568{
6569 /*
6570 * Use the C implementation if a repeat prefix is encountered.
6571 */
6572 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6573 {
6574 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
6575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6576 switch (pVCpu->iem.s.enmEffAddrMode)
6577 {
6578 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
6579 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
6580 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
6581 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6582 }
6583 }
6584
6585 /*
6586 * Sharing case implementation with movs[wdq] below.
6587 */
6588 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
6589 switch (pVCpu->iem.s.enmEffAddrMode)
6590 {
6591 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
6592 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
6593 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
6594 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6595 }
6596}
6597
6598
6599/**
6600 * @opcode 0xa5
6601 */
6602FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
6603{
6604
6605 /*
6606 * Use the C implementation if a repeat prefix is encountered.
6607 */
6608 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6609 {
6610 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
6611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6612 switch (pVCpu->iem.s.enmEffOpSize)
6613 {
6614 case IEMMODE_16BIT:
6615 switch (pVCpu->iem.s.enmEffAddrMode)
6616 {
6617 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
6618 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
6619 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
6620 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6621 }
6622 break;
6623 case IEMMODE_32BIT:
6624 switch (pVCpu->iem.s.enmEffAddrMode)
6625 {
6626 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
6627 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
6628 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
6629 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6630 }
6631 case IEMMODE_64BIT:
6632 switch (pVCpu->iem.s.enmEffAddrMode)
6633 {
6634 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
6635 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
6636 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
6637 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6638 }
6639 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6640 }
6641 }
6642
6643 /*
6644 * Annoying double switch here.
6645 * Using ugly macro for implementing the cases, sharing it with movsb.
6646 */
6647 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
6648 switch (pVCpu->iem.s.enmEffOpSize)
6649 {
6650 case IEMMODE_16BIT:
6651 switch (pVCpu->iem.s.enmEffAddrMode)
6652 {
6653 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
6654 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
6655 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
6656 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6657 }
6658 break;
6659
6660 case IEMMODE_32BIT:
6661 switch (pVCpu->iem.s.enmEffAddrMode)
6662 {
6663 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
6664 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
6665 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
6666 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6667 }
6668 break;
6669
6670 case IEMMODE_64BIT:
6671 switch (pVCpu->iem.s.enmEffAddrMode)
6672 {
6673 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6674 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
6675 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
6676 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6677 }
6678 break;
6679 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6680 }
6681}
6682
6683#undef IEM_MOVS_CASE
6684
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 * Compares [iEffSeg:xSI] (first operand) with [ES:xDI] via the cmp worker
 * (flags only, no stores), then steps both index registers per EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(3, 3, a_fMcFlags); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,  uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6712
6713/**
6714 * @opcode 0xa6
6715 */
6716FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
6717{
6718
6719 /*
6720 * Use the C implementation if a repeat prefix is encountered.
6721 */
6722 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6723 {
6724 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
6725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6726 switch (pVCpu->iem.s.enmEffAddrMode)
6727 {
6728 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6729 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6730 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6731 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6732 }
6733 }
6734 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6735 {
6736 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
6737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6738 switch (pVCpu->iem.s.enmEffAddrMode)
6739 {
6740 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6741 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6742 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6743 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6744 }
6745 }
6746
6747 /*
6748 * Sharing case implementation with cmps[wdq] below.
6749 */
6750 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
6751 switch (pVCpu->iem.s.enmEffAddrMode)
6752 {
6753 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
6754 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
6755 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
6756 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6757 }
6758}
6759
6760
6761/**
6762 * @opcode 0xa7
6763 */
6764FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
6765{
6766 /*
6767 * Use the C implementation if a repeat prefix is encountered.
6768 */
6769 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6770 {
6771 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
6772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6773 switch (pVCpu->iem.s.enmEffOpSize)
6774 {
6775 case IEMMODE_16BIT:
6776 switch (pVCpu->iem.s.enmEffAddrMode)
6777 {
6778 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6779 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6780 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6781 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6782 }
6783 break;
6784 case IEMMODE_32BIT:
6785 switch (pVCpu->iem.s.enmEffAddrMode)
6786 {
6787 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6788 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6789 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6790 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6791 }
6792 case IEMMODE_64BIT:
6793 switch (pVCpu->iem.s.enmEffAddrMode)
6794 {
6795 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
6796 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6797 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6798 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6799 }
6800 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6801 }
6802 }
6803
6804 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6805 {
6806 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
6807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6808 switch (pVCpu->iem.s.enmEffOpSize)
6809 {
6810 case IEMMODE_16BIT:
6811 switch (pVCpu->iem.s.enmEffAddrMode)
6812 {
6813 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6814 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6815 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6816 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6817 }
6818 break;
6819 case IEMMODE_32BIT:
6820 switch (pVCpu->iem.s.enmEffAddrMode)
6821 {
6822 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6823 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6824 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6825 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6826 }
6827 case IEMMODE_64BIT:
6828 switch (pVCpu->iem.s.enmEffAddrMode)
6829 {
6830 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
6831 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6832 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6833 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6834 }
6835 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6836 }
6837 }
6838
6839 /*
6840 * Annoying double switch here.
6841 * Using ugly macro for implementing the cases, sharing it with cmpsb.
6842 */
6843 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
6844 switch (pVCpu->iem.s.enmEffOpSize)
6845 {
6846 case IEMMODE_16BIT:
6847 switch (pVCpu->iem.s.enmEffAddrMode)
6848 {
6849 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
6850 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
6851 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
6852 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6853 }
6854 break;
6855
6856 case IEMMODE_32BIT:
6857 switch (pVCpu->iem.s.enmEffAddrMode)
6858 {
6859 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
6860 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
6861 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
6862 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6863 }
6864 break;
6865
6866 case IEMMODE_64BIT:
6867 switch (pVCpu->iem.s.enmEffAddrMode)
6868 {
6869 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6870 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break;
6871 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
6872 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6873 }
6874 break;
6875 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6876 }
6877}
6878
6879#undef IEM_CMPS_CASE
6880
6881/**
6882 * @opcode 0xa8
6883 */
6884FNIEMOP_DEF(iemOp_test_AL_Ib)
6885{
6886 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
6887 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6888 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
6889}
6890
6891
6892/**
6893 * @opcode 0xa9
6894 */
6895FNIEMOP_DEF(iemOp_test_eAX_Iz)
6896{
6897 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
6898 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6899 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
6900}
6901
6902
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 * Stores the low ValBits of rAX to ES:xDI and steps xDI per EFLAGS.DF. */
#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr,  X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6919
6920/**
6921 * @opcode 0xaa
6922 */
6923FNIEMOP_DEF(iemOp_stosb_Yb_AL)
6924{
6925 /*
6926 * Use the C implementation if a repeat prefix is encountered.
6927 */
6928 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6929 {
6930 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
6931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6932 switch (pVCpu->iem.s.enmEffAddrMode)
6933 {
6934 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m16);
6935 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m32);
6936 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m64);
6937 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6938 }
6939 }
6940
6941 /*
6942 * Sharing case implementation with stos[wdq] below.
6943 */
6944 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
6945 switch (pVCpu->iem.s.enmEffAddrMode)
6946 {
6947 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
6948 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
6949 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
6950 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6951 }
6952}
6953
6954
6955/**
6956 * @opcode 0xab
6957 */
6958FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
6959{
6960 /*
6961 * Use the C implementation if a repeat prefix is encountered.
6962 */
6963 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6964 {
6965 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
6966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6967 switch (pVCpu->iem.s.enmEffOpSize)
6968 {
6969 case IEMMODE_16BIT:
6970 switch (pVCpu->iem.s.enmEffAddrMode)
6971 {
6972 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m16);
6973 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m32);
6974 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m64);
6975 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6976 }
6977 break;
6978 case IEMMODE_32BIT:
6979 switch (pVCpu->iem.s.enmEffAddrMode)
6980 {
6981 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m16);
6982 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m32);
6983 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m64);
6984 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6985 }
6986 case IEMMODE_64BIT:
6987 switch (pVCpu->iem.s.enmEffAddrMode)
6988 {
6989 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
6990 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m32);
6991 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m64);
6992 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6993 }
6994 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6995 }
6996 }
6997
6998 /*
6999 * Annoying double switch here.
7000 * Using ugly macro for implementing the cases, sharing it with stosb.
7001 */
7002 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7003 switch (pVCpu->iem.s.enmEffOpSize)
7004 {
7005 case IEMMODE_16BIT:
7006 switch (pVCpu->iem.s.enmEffAddrMode)
7007 {
7008 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7009 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7010 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
7011 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7012 }
7013 break;
7014
7015 case IEMMODE_32BIT:
7016 switch (pVCpu->iem.s.enmEffAddrMode)
7017 {
7018 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7019 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7020 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
7021 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7022 }
7023 break;
7024
7025 case IEMMODE_64BIT:
7026 switch (pVCpu->iem.s.enmEffAddrMode)
7027 {
7028 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7029 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
7030 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
7031 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7032 }
7033 break;
7034 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7035 }
7036}
7037
7038#undef IEM_STOS_CASE
7039
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the non-repeated LODS body: loads ValBits from iEffSeg:xSI (DS unless
 * overridden by a segment prefix) into (r/e)AX, then decrements or increments
 * xSI by ValBits/8 according to EFLAGS.DF.
 *
 * @param   ValBits     Operand width in bits (8/16/32/64).
 * @param   AddrBits    Effective address width in bits (16/32/64).
 * @param   a_fMcFlags  IEM_MC_F_XXX restrictions passed to IEM_MC_BEGIN. */
#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7056
7057/**
7058 * @opcode 0xac
7059 */
7060FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
7061{
7062 /*
7063 * Use the C implementation if a repeat prefix is encountered.
7064 */
7065 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7066 {
7067 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
7068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7069 switch (pVCpu->iem.s.enmEffAddrMode)
7070 {
7071 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
7072 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
7073 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
7074 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7075 }
7076 }
7077
7078 /*
7079 * Sharing case implementation with stos[wdq] below.
7080 */
7081 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
7082 switch (pVCpu->iem.s.enmEffAddrMode)
7083 {
7084 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7085 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7086 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
7087 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7088 }
7089}
7090
7091
7092/**
7093 * @opcode 0xad
7094 */
7095FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
7096{
7097 /*
7098 * Use the C implementation if a repeat prefix is encountered.
7099 */
7100 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7101 {
7102 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
7103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7104 switch (pVCpu->iem.s.enmEffOpSize)
7105 {
7106 case IEMMODE_16BIT:
7107 switch (pVCpu->iem.s.enmEffAddrMode)
7108 {
7109 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
7110 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
7111 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
7112 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7113 }
7114 break;
7115 case IEMMODE_32BIT:
7116 switch (pVCpu->iem.s.enmEffAddrMode)
7117 {
7118 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
7119 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
7120 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
7121 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7122 }
7123 case IEMMODE_64BIT:
7124 switch (pVCpu->iem.s.enmEffAddrMode)
7125 {
7126 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
7127 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
7128 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
7129 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7130 }
7131 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7132 }
7133 }
7134
7135 /*
7136 * Annoying double switch here.
7137 * Using ugly macro for implementing the cases, sharing it with lodsb.
7138 */
7139 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
7140 switch (pVCpu->iem.s.enmEffOpSize)
7141 {
7142 case IEMMODE_16BIT:
7143 switch (pVCpu->iem.s.enmEffAddrMode)
7144 {
7145 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7146 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7147 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
7148 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7149 }
7150 break;
7151
7152 case IEMMODE_32BIT:
7153 switch (pVCpu->iem.s.enmEffAddrMode)
7154 {
7155 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7156 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7157 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
7158 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7159 }
7160 break;
7161
7162 case IEMMODE_64BIT:
7163 switch (pVCpu->iem.s.enmEffAddrMode)
7164 {
7165 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7166 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
7167 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
7168 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7169 }
7170 break;
7171 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7172 }
7173}
7174
7175#undef IEM_LODS_CASE
7176
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the non-repeated SCAS body: loads ValBits from ES:xDI, compares it
 * against (r/e)AX via iemAImpl_cmp_uNN (updating EFLAGS only), then
 * decrements or increments xDI by ValBits/8 according to EFLAGS.DF.
 *
 * @param   ValBits     Operand width in bits (8/16/32/64).
 * @param   AddrBits    Effective address width in bits (16/32/64).
 * @param   a_fMcFlags  IEM_MC_F_XXX restrictions passed to IEM_MC_BEGIN. */
#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 2, a_fMcFlags); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
7199
7200/**
7201 * @opcode 0xae
7202 */
7203FNIEMOP_DEF(iemOp_scasb_AL_Xb)
7204{
7205 /*
7206 * Use the C implementation if a repeat prefix is encountered.
7207 */
7208 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7209 {
7210 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
7211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7212 switch (pVCpu->iem.s.enmEffAddrMode)
7213 {
7214 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m16);
7215 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m32);
7216 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m64);
7217 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7218 }
7219 }
7220 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7221 {
7222 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
7223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7224 switch (pVCpu->iem.s.enmEffAddrMode)
7225 {
7226 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m16);
7227 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m32);
7228 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m64);
7229 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7230 }
7231 }
7232
7233 /*
7234 * Sharing case implementation with stos[wdq] below.
7235 */
7236 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
7237 switch (pVCpu->iem.s.enmEffAddrMode)
7238 {
7239 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7240 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7241 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
7242 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7243 }
7244}
7245
7246
7247/**
7248 * @opcode 0xaf
7249 */
7250FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
7251{
7252 /*
7253 * Use the C implementation if a repeat prefix is encountered.
7254 */
7255 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7256 {
7257 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
7258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7259 switch (pVCpu->iem.s.enmEffOpSize)
7260 {
7261 case IEMMODE_16BIT:
7262 switch (pVCpu->iem.s.enmEffAddrMode)
7263 {
7264 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m16);
7265 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m32);
7266 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m64);
7267 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7268 }
7269 break;
7270 case IEMMODE_32BIT:
7271 switch (pVCpu->iem.s.enmEffAddrMode)
7272 {
7273 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m16);
7274 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m32);
7275 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m64);
7276 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7277 }
7278 case IEMMODE_64BIT:
7279 switch (pVCpu->iem.s.enmEffAddrMode)
7280 {
7281 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
7282 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m32);
7283 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m64);
7284 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7285 }
7286 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7287 }
7288 }
7289 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7290 {
7291 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
7292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7293 switch (pVCpu->iem.s.enmEffOpSize)
7294 {
7295 case IEMMODE_16BIT:
7296 switch (pVCpu->iem.s.enmEffAddrMode)
7297 {
7298 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m16);
7299 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m32);
7300 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m64);
7301 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7302 }
7303 break;
7304 case IEMMODE_32BIT:
7305 switch (pVCpu->iem.s.enmEffAddrMode)
7306 {
7307 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m16);
7308 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m32);
7309 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m64);
7310 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7311 }
7312 case IEMMODE_64BIT:
7313 switch (pVCpu->iem.s.enmEffAddrMode)
7314 {
7315 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
7316 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m32);
7317 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m64);
7318 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7319 }
7320 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7321 }
7322 }
7323
7324 /*
7325 * Annoying double switch here.
7326 * Using ugly macro for implementing the cases, sharing it with scasb.
7327 */
7328 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
7329 switch (pVCpu->iem.s.enmEffOpSize)
7330 {
7331 case IEMMODE_16BIT:
7332 switch (pVCpu->iem.s.enmEffAddrMode)
7333 {
7334 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7335 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7336 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
7337 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7338 }
7339 break;
7340
7341 case IEMMODE_32BIT:
7342 switch (pVCpu->iem.s.enmEffAddrMode)
7343 {
7344 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7345 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7346 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
7347 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7348 }
7349 break;
7350
7351 case IEMMODE_64BIT:
7352 switch (pVCpu->iem.s.enmEffAddrMode)
7353 {
7354 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7355 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
7356 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
7357 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7358 }
7359 break;
7360 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7361 }
7362}
7363
7364#undef IEM_SCAS_CASE
7365
7366/**
7367 * Common 'mov r8, imm8' helper.
7368 */
7369FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
7370{
7371 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7372 IEM_MC_BEGIN(0, 1, 0);
7373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7374 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
7375 IEM_MC_STORE_GREG_U8(iFixedReg, u8Value);
7376 IEM_MC_ADVANCE_RIP_AND_FINISH();
7377 IEM_MC_END();
7378}
7379
7380
7381/**
7382 * @opcode 0xb0
7383 */
7384FNIEMOP_DEF(iemOp_mov_AL_Ib)
7385{
7386 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
7387 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7388}
7389
7390
7391/**
7392 * @opcode 0xb1
7393 */
7394FNIEMOP_DEF(iemOp_CL_Ib)
7395{
7396 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
7397 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7398}
7399
7400
7401/**
7402 * @opcode 0xb2
7403 */
7404FNIEMOP_DEF(iemOp_DL_Ib)
7405{
7406 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
7407 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7408}
7409
7410
7411/**
7412 * @opcode 0xb3
7413 */
7414FNIEMOP_DEF(iemOp_BL_Ib)
7415{
7416 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
7417 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7418}
7419
7420
7421/**
7422 * @opcode 0xb4
7423 */
7424FNIEMOP_DEF(iemOp_mov_AH_Ib)
7425{
7426 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
7427 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7428}
7429
7430
7431/**
7432 * @opcode 0xb5
7433 */
7434FNIEMOP_DEF(iemOp_CH_Ib)
7435{
7436 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
7437 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7438}
7439
7440
7441/**
7442 * @opcode 0xb6
7443 */
7444FNIEMOP_DEF(iemOp_DH_Ib)
7445{
7446 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
7447 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7448}
7449
7450
7451/**
7452 * @opcode 0xb7
7453 */
7454FNIEMOP_DEF(iemOp_BH_Ib)
7455{
7456 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
7457 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7458}
7459
7460
7461/**
7462 * Common 'mov regX,immX' helper.
7463 */
7464FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
7465{
7466 switch (pVCpu->iem.s.enmEffOpSize)
7467 {
7468 case IEMMODE_16BIT:
7469 IEM_MC_BEGIN(0, 1, 0);
7470 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7472 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
7473 IEM_MC_STORE_GREG_U16(iFixedReg, u16Value);
7474 IEM_MC_ADVANCE_RIP_AND_FINISH();
7475 IEM_MC_END();
7476 break;
7477
7478 case IEMMODE_32BIT:
7479 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
7480 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
7481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7482 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
7483 IEM_MC_STORE_GREG_U32(iFixedReg, u32Value);
7484 IEM_MC_ADVANCE_RIP_AND_FINISH();
7485 IEM_MC_END();
7486 break;
7487
7488 case IEMMODE_64BIT:
7489 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
7490 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
7491 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7492 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
7493 IEM_MC_STORE_GREG_U64(iFixedReg, u64Value);
7494 IEM_MC_ADVANCE_RIP_AND_FINISH();
7495 IEM_MC_END();
7496 break;
7497 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7498 }
7499}
7500
7501
7502/**
7503 * @opcode 0xb8
7504 */
7505FNIEMOP_DEF(iemOp_eAX_Iv)
7506{
7507 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
7508 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7509}
7510
7511
7512/**
7513 * @opcode 0xb9
7514 */
7515FNIEMOP_DEF(iemOp_eCX_Iv)
7516{
7517 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
7518 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7519}
7520
7521
7522/**
7523 * @opcode 0xba
7524 */
7525FNIEMOP_DEF(iemOp_eDX_Iv)
7526{
7527 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
7528 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7529}
7530
7531
7532/**
7533 * @opcode 0xbb
7534 */
7535FNIEMOP_DEF(iemOp_eBX_Iv)
7536{
7537 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
7538 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7539}
7540
7541
7542/**
7543 * @opcode 0xbc
7544 */
7545FNIEMOP_DEF(iemOp_eSP_Iv)
7546{
7547 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
7548 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7549}
7550
7551
7552/**
7553 * @opcode 0xbd
7554 */
7555FNIEMOP_DEF(iemOp_eBP_Iv)
7556{
7557 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
7558 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7559}
7560
7561
7562/**
7563 * @opcode 0xbe
7564 */
7565FNIEMOP_DEF(iemOp_eSI_Iv)
7566{
7567 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
7568 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7569}
7570
7571
7572/**
7573 * @opcode 0xbf
7574 */
7575FNIEMOP_DEF(iemOp_eDI_Iv)
7576{
7577 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
7578 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7579}
7580
7581
7582/**
7583 * @opcode 0xc0
7584 */
7585FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
7586{
7587 IEMOP_HLP_MIN_186();
7588 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7589 PCIEMOPSHIFTSIZES pImpl;
7590 switch (IEM_GET_MODRM_REG_8(bRm))
7591 {
7592 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
7593 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
7594 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
7595 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
7596 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
7597 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
7598 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
7599 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
7600 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
7601 }
7602 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7603
7604 if (IEM_IS_MODRM_REG_MODE(bRm))
7605 {
7606 /* register */
7607 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7608 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186);
7609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7610 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7611 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7612 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7613 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7614 IEM_MC_REF_EFLAGS(pEFlags);
7615 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7616 IEM_MC_ADVANCE_RIP_AND_FINISH();
7617 IEM_MC_END();
7618 }
7619 else
7620 {
7621 /* memory */
7622 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_186);
7623 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7624 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7625 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7626 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7627 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
7628
7629 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7630 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7631 IEM_MC_ASSIGN(cShiftArg, cShift);
7632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7633 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7634 IEM_MC_FETCH_EFLAGS(EFlags);
7635 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7636
7637 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
7638 IEM_MC_COMMIT_EFLAGS(EFlags);
7639 IEM_MC_ADVANCE_RIP_AND_FINISH();
7640 IEM_MC_END();
7641 }
7642}
7643
7644
7645/**
7646 * @opcode 0xc1
7647 */
7648FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
7649{
7650 IEMOP_HLP_MIN_186();
7651 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7652 PCIEMOPSHIFTSIZES pImpl;
7653 switch (IEM_GET_MODRM_REG_8(bRm))
7654 {
7655 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
7656 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
7657 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
7658 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
7659 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
7660 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
7661 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
7662 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
7663 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
7664 }
7665 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7666
7667 if (IEM_IS_MODRM_REG_MODE(bRm))
7668 {
7669 /* register */
7670 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7671 switch (pVCpu->iem.s.enmEffOpSize)
7672 {
7673 case IEMMODE_16BIT:
7674 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186);
7675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7676 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7677 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7678 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7679 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7680 IEM_MC_REF_EFLAGS(pEFlags);
7681 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7682 IEM_MC_ADVANCE_RIP_AND_FINISH();
7683 IEM_MC_END();
7684 break;
7685
7686 case IEMMODE_32BIT:
7687 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386);
7688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7689 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7690 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7691 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7692 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7693 IEM_MC_REF_EFLAGS(pEFlags);
7694 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7695 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7696 IEM_MC_ADVANCE_RIP_AND_FINISH();
7697 IEM_MC_END();
7698 break;
7699
7700 case IEMMODE_64BIT:
7701 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT);
7702 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7703 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7704 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7705 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7706 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7707 IEM_MC_REF_EFLAGS(pEFlags);
7708 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7709 IEM_MC_ADVANCE_RIP_AND_FINISH();
7710 IEM_MC_END();
7711 break;
7712
7713 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7714 }
7715 }
7716 else
7717 {
7718 /* memory */
7719 switch (pVCpu->iem.s.enmEffOpSize)
7720 {
7721 case IEMMODE_16BIT:
7722 IEM_MC_BEGIN(3, 3, 0);
7723 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7724 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7725 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7726 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7727 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
7728
7729 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7730 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7731 IEM_MC_ASSIGN(cShiftArg, cShift);
7732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7733 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7734 IEM_MC_FETCH_EFLAGS(EFlags);
7735 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7736
7737 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
7738 IEM_MC_COMMIT_EFLAGS(EFlags);
7739 IEM_MC_ADVANCE_RIP_AND_FINISH();
7740 IEM_MC_END();
7741 break;
7742
7743 case IEMMODE_32BIT:
7744 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386);
7745 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7746 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7747 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7748 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7749 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
7750
7751 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7752 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7753 IEM_MC_ASSIGN(cShiftArg, cShift);
7754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7755 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7756 IEM_MC_FETCH_EFLAGS(EFlags);
7757 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7758
7759 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
7760 IEM_MC_COMMIT_EFLAGS(EFlags);
7761 IEM_MC_ADVANCE_RIP_AND_FINISH();
7762 IEM_MC_END();
7763 break;
7764
7765 case IEMMODE_64BIT:
7766 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT);
7767 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7768 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7769 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7770 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7771 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
7772
7773 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7774 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7775 IEM_MC_ASSIGN(cShiftArg, cShift);
7776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7777 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7778 IEM_MC_FETCH_EFLAGS(EFlags);
7779 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7780
7781 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
7782 IEM_MC_COMMIT_EFLAGS(EFlags);
7783 IEM_MC_ADVANCE_RIP_AND_FINISH();
7784 IEM_MC_END();
7785 break;
7786
7787 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7788 }
7789 }
7790}
7791
7792
7793/**
7794 * @opcode 0xc2
7795 */
7796FNIEMOP_DEF(iemOp_retn_Iw)
7797{
7798 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
7799 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7800 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7802 switch (pVCpu->iem.s.enmEffOpSize)
7803 {
7804 case IEMMODE_16BIT:
7805 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_16, u16Imm);
7806 case IEMMODE_32BIT:
7807 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_32, u16Imm);
7808 case IEMMODE_64BIT:
7809 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_64, u16Imm);
7810 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7811 }
7812}
7813
7814
7815/**
7816 * @opcode 0xc3
7817 */
7818FNIEMOP_DEF(iemOp_retn)
7819{
7820 IEMOP_MNEMONIC(retn, "retn");
7821 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7823 switch (pVCpu->iem.s.enmEffOpSize)
7824 {
7825 case IEMMODE_16BIT:
7826 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_16);
7827 case IEMMODE_32BIT:
7828 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_32);
7829 case IEMMODE_64BIT:
7830 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_64);
7831 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7832 }
7833}
7834
7835
7836/**
7837 * @opcode 0xc4
7838 */
/* Opcode 0xc4 is either LES (legacy/compat mode, MOD != 3) or the 3-byte
   VEX prefix (64-bit mode, or MOD == 3).  For VEX, decodes the two payload
   bytes and dispatches into the appropriate VEX opcode map. */
7839FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
7840{
7841    /* The LES instruction is invalid 64-bit mode. In legacy and
7842       compatibility mode it is invalid with MOD=3.
7843       The use as a VEX prefix is made possible by assigning the inverted
7844       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
7845       outside of 64-bit mode. VEX is not available in real or v86 mode. */
7846    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7847    if (   IEM_IS_64BIT_CODE(pVCpu)
7848        || IEM_IS_MODRM_REG_MODE(bRm) )
7849    {
7850        IEMOP_MNEMONIC(vex3_prefix, "vex3");
7851        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
7852        {
7853            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
7854               the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
7855            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
7856            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
7857            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* VEX.W is only honoured in 64-bit mode. */
7858            if ((bVex2 & 0x80 /* VEX.W */) && IEM_IS_64BIT_CODE(pVCpu))
7859                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* The R/X/B bits and vvvv are stored inverted in the prefix bytes. */
7860            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
7861            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
7862            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
7863            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
7864            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
7865            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;
7866
            /* VEX.mmmmm selects the opcode map; only maps 1..3 are defined. */
7867            switch (bRm & 0x1f)
7868            {
7869                case 1: /* 0x0f lead opcode byte. */
7870#ifdef IEM_WITH_VEX
7871                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7872#else
7873                    IEMOP_BITCH_ABOUT_STUB();
7874                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7875#endif
7876
7877                case 2: /* 0x0f 0x38 lead opcode bytes. */
7878#ifdef IEM_WITH_VEX
7879                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7880#else
7881                    IEMOP_BITCH_ABOUT_STUB();
7882                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7883#endif
7884
7885                case 3: /* 0x0f 0x3a lead opcode bytes. */
7886#ifdef IEM_WITH_VEX
7887                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7888#else
7889                    IEMOP_BITCH_ABOUT_STUB();
7890                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7891#endif
7892
7893                default:
7894                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
7895                    IEMOP_RAISE_INVALID_OPCODE_RET();
7896            }
7897        }
7898        Log(("VEX3: VEX support disabled!\n"));
7899        IEMOP_RAISE_INVALID_OPCODE_RET();
7900    }
7901
    /* Legacy path: LES - load ES:reg from a far pointer in memory. */
7902    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
7903    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
7904}
7905
7906
7907/**
7908 * @opcode 0xc5
7909 */
/* Opcode 0xc5 is either LDS (legacy/compat mode, MOD != 3) or the 2-byte
   VEX prefix (64-bit mode, or MOD == 3).  The 2-byte form always selects
   VEX opcode map 1 (0x0f). */
7910FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
7911{
7912    /* The LDS instruction is invalid 64-bit mode. In legacy and
7913       compatibility mode it is invalid with MOD=3.
7914       The use as a VEX prefix is made possible by assigning the inverted
7915       REX.R to the top MOD bit, and the top bit in the inverted register
7916       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
7917       to accessing registers 0..7 in this VEX form. */
7918    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7919    if (   IEM_IS_64BIT_CODE(pVCpu)
7920        || IEM_IS_MODRM_REG_MODE(bRm))
7921    {
7922        IEMOP_MNEMONIC(vex2_prefix, "vex2");
7923        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
7924        {
7925            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
7926               the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
7927            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
7928            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* REX.R and vvvv are stored inverted in the prefix byte. */
7929            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
7930            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
7931            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
7932            pVCpu->iem.s.idxPrefix  = bRm & 0x3;
7933
7934#ifdef IEM_WITH_VEX
7935            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7936#else
7937            IEMOP_BITCH_ABOUT_STUB();
7938            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7939#endif
7940        }
7941
7942        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
7943        Log(("VEX2: VEX support disabled!\n"));
7944        IEMOP_RAISE_INVALID_OPCODE_RET();
7945    }
7946
    /* Legacy path: LDS - load DS:reg from a far pointer in memory. */
7947    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
7948    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
7949}
7950
7951
7952/**
7953 * @opcode 0xc6
7954 */
/* Group 11, byte form: MOV Eb,Ib is the only valid encoding (/0); any other
   ModR/M reg value raises #UD. */
7955FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
7956{
7957    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7958    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
7959        IEMOP_RAISE_INVALID_OPCODE_RET();
7960    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
7961
7962    if (IEM_IS_MODRM_REG_MODE(bRm))
7963    {
7964        /* register access */
7965        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7966        IEM_MC_BEGIN(0, 0, 0);
7967        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7968        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
7969        IEM_MC_ADVANCE_RIP_AND_FINISH();
7970        IEM_MC_END();
7971    }
7972    else
7973    {
7974        /* memory access. */
7975        IEM_MC_BEGIN(0, 1, 0);
7976        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* Effective address first; one more opcode byte (the immediate) follows. */
7977        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7978        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7979        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7980        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
7981        IEM_MC_ADVANCE_RIP_AND_FINISH();
7982        IEM_MC_END();
7983    }
7984}
7985
7986
7987/**
7988 * @opcode 0xc7
7989 */
/* Group 11, word/dword/qword form: MOV Ev,Iz is the only valid encoding (/0);
   any other ModR/M reg value raises #UD.  The 64-bit variant sign-extends a
   32-bit immediate. */
7990FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
7991{
7992    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7993    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
7994        IEMOP_RAISE_INVALID_OPCODE_RET();
7995    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
7996
7997    if (IEM_IS_MODRM_REG_MODE(bRm))
7998    {
7999        /* register access */
8000        switch (pVCpu->iem.s.enmEffOpSize)
8001        {
8002            case IEMMODE_16BIT:
8003                IEM_MC_BEGIN(0, 0, 0);
8004                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8005                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8006                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
8007                IEM_MC_ADVANCE_RIP_AND_FINISH();
8008                IEM_MC_END();
8009                break;
8010
8011            case IEMMODE_32BIT:
8012                IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8013                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8014                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8015                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
8016                IEM_MC_ADVANCE_RIP_AND_FINISH();
8017                IEM_MC_END();
8018                break;
8019
8020            case IEMMODE_64BIT:
8021                IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT);
                /* Iz is at most 32 bits; sign-extend it to 64 bits. */
8022                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8023                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8024                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
8025                IEM_MC_ADVANCE_RIP_AND_FINISH();
8026                IEM_MC_END();
8027                break;
8028
8029            IEM_NOT_REACHED_DEFAULT_CASE_RET();
8030        }
8031    }
8032    else
8033    {
8034        /* memory access. */
8035        switch (pVCpu->iem.s.enmEffOpSize)
8036        {
            /* The second argument of IEM_MC_CALC_RM_EFF_ADDR is the number of
               immediate bytes that still follow the ModR/M encoding. */
8037            case IEMMODE_16BIT:
8038                IEM_MC_BEGIN(0, 1, 0);
8039                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8040                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
8041                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8042                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8043                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
8044                IEM_MC_ADVANCE_RIP_AND_FINISH();
8045                IEM_MC_END();
8046                break;
8047
8048            case IEMMODE_32BIT:
8049                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8050                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8051                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8052                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8053                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8054                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
8055                IEM_MC_ADVANCE_RIP_AND_FINISH();
8056                IEM_MC_END();
8057                break;
8058
8059            case IEMMODE_64BIT:
8060                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
8061                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8062                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8063                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8064                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8065                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
8066                IEM_MC_ADVANCE_RIP_AND_FINISH();
8067                IEM_MC_END();
8068                break;
8069
8070            IEM_NOT_REACHED_DEFAULT_CASE_RET();
8071        }
8072    }
8073}
8074
8075
8076
8077
8078/**
8079 * @opcode 0xc8
8080 */
/* ENTER imm16,imm8 (186+): creates a stack frame of cbFrame bytes with the
   given nesting level; deferred to iemCImpl_enter. */
8081FNIEMOP_DEF(iemOp_enter_Iw_Ib)
8082{
8083    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
8084    IEMOP_HLP_MIN_186();
8085    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8086    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
8087    uint8_t u8NestingLevel;  IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
8088    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8089    IEM_MC_DEFER_TO_CIMPL_3_RET(0, iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
8090}
8091
8092
8093/**
8094 * @opcode 0xc9
8095 */
/* LEAVE (186+): tears down the current ENTER stack frame; deferred to
   iemCImpl_leave with the effective operand size. */
8096FNIEMOP_DEF(iemOp_leave)
8097{
8098    IEMOP_MNEMONIC(leave, "leave");
8099    IEMOP_HLP_MIN_186();
8100    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8101    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8102    IEM_MC_DEFER_TO_CIMPL_1_RET(0, iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
8103}
8104
8105
8106/**
8107 * @opcode 0xca
8108 */
/* Far return with immediate (RETF imm16): pops CS:IP/EIP/RIP and releases
   imm16 bytes of stack.  May change CPU mode, hence IEM_CIMPL_F_MODE. */
8109FNIEMOP_DEF(iemOp_retf_Iw)
8110{
8111    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
8112    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8113    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8114    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE,
8115                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
8116}
8117
8118
8119/**
8120 * @opcode 0xcb
8121 */
/* Far return without immediate (RETF): same as RETF imm16 with a zero
   stack-adjustment argument. */
8122FNIEMOP_DEF(iemOp_retf)
8123{
8124    IEMOP_MNEMONIC(retf, "retf");
8125    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8126    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE,
8127                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
8128}
8129
8130
8131/**
8132 * @opcode 0xcc
8133 */
/* INT3 (0xcc): raises a #BP breakpoint trap via the common software
   interrupt C implementation. */
8134FNIEMOP_DEF(iemOp_int3)
8135{
8136    IEMOP_MNEMONIC(int3, "int3");
8137    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8138    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
8139                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
8140                                iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
8141}
8142
8143
8144/**
8145 * @opcode 0xcd
8146 */
/* INT imm8 (0xcd): software interrupt with the vector taken from the
   immediate byte; deferred to the common software interrupt handler. */
8147FNIEMOP_DEF(iemOp_int_Ib)
8148{
8149    IEMOP_MNEMONIC(int_Ib, "int Ib");
8150    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
8151    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8152    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
8153                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
8154                                iemCImpl_int, u8Int, IEMINT_INTN);
8155}
8156
8157
8158/**
8159 * @opcode 0xce
8160 */
/* INTO (0xce): raises #OF if the overflow flag is set; invalid in 64-bit
   mode.  The branch is conditional on OF, hence BRANCH_CONDITIONAL. */
8161FNIEMOP_DEF(iemOp_into)
8162{
8163    IEMOP_MNEMONIC(into, "into");
8164    IEMOP_HLP_NO_64BIT();
8165    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_CONDITIONAL
8166                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
8167                                iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
8168}
8169
8170
8171/**
8172 * @opcode 0xcf
8173 */
/* IRET (0xcf): interrupt return; restores flags and may switch mode, and
   interrupts are re-checked before the next instruction (CHECK_IRQ_BEFORE). */
8174FNIEMOP_DEF(iemOp_iret)
8175{
8176    IEMOP_MNEMONIC(iret, "iret");
8177    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8178    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
8179                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
8180                                iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
8181}
8182
8183
8184/**
8185 * @opcode 0xd0
8186 */
/* Group 2 byte rotates/shifts by 1 (0xd0): the ModR/M reg field selects the
   operation (rol/ror/rcl/rcr/shl/shr/sar); /6 is undefined and raises #UD. */
8187FNIEMOP_DEF(iemOp_Grp2_Eb_1)
8188{
8189    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8190    PCIEMOPSHIFTSIZES pImpl;
8191    switch (IEM_GET_MODRM_REG_8(bRm))
8192    {
8193        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
8194        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
8195        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
8196        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
8197        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
8198        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
8199        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
8200        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8201        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
8202    }
    /* OF and AF are architecturally undefined for some of these operations. */
8203    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8204
8205    if (IEM_IS_MODRM_REG_MODE(bRm))
8206    {
8207        /* register */
8208        IEM_MC_BEGIN(3, 0, 0);
8209        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8210        IEM_MC_ARG(uint8_t *,       pu8Dst,            0);
8211        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,  1);
8212        IEM_MC_ARG(uint32_t *,      pEFlags,           2);
8213        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8214        IEM_MC_REF_EFLAGS(pEFlags);
8215        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8216        IEM_MC_ADVANCE_RIP_AND_FINISH();
8217        IEM_MC_END();
8218    }
8219    else
8220    {
        /* memory: map the destination read/write, call the worker, then
           commit both the memory result and the updated eflags. */
8221        /* memory */
8222        IEM_MC_BEGIN(3, 3, 0);
8223        IEM_MC_ARG(uint8_t *,   pu8Dst,           0);
8224        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
8225        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,  2);
8226        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
8227        IEM_MC_LOCAL(uint8_t,   bUnmapInfo);
8228
8229        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8230        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8231        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8232        IEM_MC_FETCH_EFLAGS(EFlags);
8233        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8234
8235        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
8236        IEM_MC_COMMIT_EFLAGS(EFlags);
8237        IEM_MC_ADVANCE_RIP_AND_FINISH();
8238        IEM_MC_END();
8239    }
8240}
8241
8242
8243
8244/**
8245 * @opcode 0xd1
8246 */
/* Group 2 word/dword/qword rotates/shifts by 1 (0xd1): the ModR/M reg field
   selects the operation; /6 raises #UD.  Each operand size has a register
   and a memory variant. */
8247FNIEMOP_DEF(iemOp_Grp2_Ev_1)
8248{
8249    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8250    PCIEMOPSHIFTSIZES pImpl;
8251    switch (IEM_GET_MODRM_REG_8(bRm))
8252    {
8253        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
8254        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
8255        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
8256        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
8257        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
8258        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
8259        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
8260        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8261        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
8262    }
    /* OF and AF are architecturally undefined for some of these operations. */
8263    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8264
8265    if (IEM_IS_MODRM_REG_MODE(bRm))
8266    {
8267        /* register */
8268        switch (pVCpu->iem.s.enmEffOpSize)
8269        {
8270            case IEMMODE_16BIT:
8271                IEM_MC_BEGIN(3, 0, 0);
8272                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8273                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
8274                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
8275                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
8276                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8277                IEM_MC_REF_EFLAGS(pEFlags);
8278                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8279                IEM_MC_ADVANCE_RIP_AND_FINISH();
8280                IEM_MC_END();
8281                break;
8282
8283            case IEMMODE_32BIT:
8284                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386);
8285                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8286                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
8287                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
8288                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
8289                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8290                IEM_MC_REF_EFLAGS(pEFlags);
8291                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half in 64-bit mode. */
8292                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8293                IEM_MC_ADVANCE_RIP_AND_FINISH();
8294                IEM_MC_END();
8295                break;
8296
8297            case IEMMODE_64BIT:
8298                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT);
8299                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8300                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
8301                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
8302                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
8303                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8304                IEM_MC_REF_EFLAGS(pEFlags);
8305                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8306                IEM_MC_ADVANCE_RIP_AND_FINISH();
8307                IEM_MC_END();
8308                break;
8309
8310            IEM_NOT_REACHED_DEFAULT_CASE_RET();
8311        }
8312    }
8313    else
8314    {
        /* memory: map the destination read/write, call the worker, then
           commit both the memory result and the updated eflags. */
8315        /* memory */
8316        switch (pVCpu->iem.s.enmEffOpSize)
8317        {
8318            case IEMMODE_16BIT:
8319                IEM_MC_BEGIN(3, 3, 0);
8320                IEM_MC_ARG(uint16_t *,  pu16Dst,           0);
8321                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8322                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,   2);
8323                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
8324                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);
8325
8326                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8327                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8328                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8329                IEM_MC_FETCH_EFLAGS(EFlags);
8330                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8331
8332                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
8333                IEM_MC_COMMIT_EFLAGS(EFlags);
8334                IEM_MC_ADVANCE_RIP_AND_FINISH();
8335                IEM_MC_END();
8336                break;
8337
8338            case IEMMODE_32BIT:
8339                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386);
8340                IEM_MC_ARG(uint32_t *,  pu32Dst,           0);
8341                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8342                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,   2);
8343                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
8344                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);
8345
8346                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8347                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8348                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8349                IEM_MC_FETCH_EFLAGS(EFlags);
8350                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8351
8352                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
8353                IEM_MC_COMMIT_EFLAGS(EFlags);
8354                IEM_MC_ADVANCE_RIP_AND_FINISH();
8355                IEM_MC_END();
8356                break;
8357
8358            case IEMMODE_64BIT:
8359                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT);
8360                IEM_MC_ARG(uint64_t *,  pu64Dst,           0);
8361                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8362                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,   2);
8363                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
8364                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);
8365
8366                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8367                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8368                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8369                IEM_MC_FETCH_EFLAGS(EFlags);
8370                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8371
8372                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
8373                IEM_MC_COMMIT_EFLAGS(EFlags);
8374                IEM_MC_ADVANCE_RIP_AND_FINISH();
8375                IEM_MC_END();
8376                break;
8377
8378            IEM_NOT_REACHED_DEFAULT_CASE_RET();
8379        }
8380    }
8381}
8382
8383
8384/**
8385 * @opcode 0xd2
8386 */
/* Group 2 byte rotates/shifts by CL (0xd2): like 0xd0 but the shift count is
   fetched from the CL register instead of being the constant 1; /6 is #UD. */
8387FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
8388{
8389    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8390    PCIEMOPSHIFTSIZES pImpl;
8391    switch (IEM_GET_MODRM_REG_8(bRm))
8392    {
8393        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
8394        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
8395        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
8396        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
8397        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
8398        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
8399        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
8400        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8401        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
8402    }
    /* OF and AF are architecturally undefined for some of these operations. */
8403    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8404
8405    if (IEM_IS_MODRM_REG_MODE(bRm))
8406    {
8407        /* register */
8408        IEM_MC_BEGIN(3, 0, 0);
8409        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8410        IEM_MC_ARG(uint8_t *,   pu8Dst,    0);
8411        IEM_MC_ARG(uint8_t,     cShiftArg, 1);
8412        IEM_MC_ARG(uint32_t *,  pEFlags,   2);
8413        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* Shift count comes from CL (low byte of rCX). */
8414        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8415        IEM_MC_REF_EFLAGS(pEFlags);
8416        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8417        IEM_MC_ADVANCE_RIP_AND_FINISH();
8418        IEM_MC_END();
8419    }
8420    else
8421    {
        /* memory: map the destination read/write, call the worker, then
           commit both the memory result and the updated eflags. */
8422        /* memory */
8423        IEM_MC_BEGIN(3, 3, 0);
8424        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
8425        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
8426        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8427        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
8428        IEM_MC_LOCAL(uint8_t,   bUnmapInfo);
8429
8430        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8431        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8432        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8433        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8434        IEM_MC_FETCH_EFLAGS(EFlags);
8435        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8436
8437        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
8438        IEM_MC_COMMIT_EFLAGS(EFlags);
8439        IEM_MC_ADVANCE_RIP_AND_FINISH();
8440        IEM_MC_END();
8441    }
8442}
8443
8444
8445/**
8446 * @opcode 0xd3
8447 */
/* Group 2 word/dword/qword rotates/shifts by CL (0xd3): like 0xd1 but the
   shift count is fetched from CL; /6 raises #UD.  Register and memory
   variants per effective operand size. */
8448FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
8449{
8450    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8451    PCIEMOPSHIFTSIZES pImpl;
8452    switch (IEM_GET_MODRM_REG_8(bRm))
8453    {
8454        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
8455        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
8456        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
8457        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
8458        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
8459        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
8460        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
8461        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8462        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8463    }
    /* OF and AF are architecturally undefined for some of these operations. */
8464    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8465
8466    if (IEM_IS_MODRM_REG_MODE(bRm))
8467    {
8468        /* register */
8469        switch (pVCpu->iem.s.enmEffOpSize)
8470        {
8471            case IEMMODE_16BIT:
8472                IEM_MC_BEGIN(3, 0, 0);
8473                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8474                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
8475                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
8476                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
8477                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                /* Shift count comes from CL (low byte of rCX). */
8478                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8479                IEM_MC_REF_EFLAGS(pEFlags);
8480                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8481                IEM_MC_ADVANCE_RIP_AND_FINISH();
8482                IEM_MC_END();
8483                break;
8484
8485            case IEMMODE_32BIT:
8486                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386);
8487                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8488                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
8489                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
8490                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
8491                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8492                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8493                IEM_MC_REF_EFLAGS(pEFlags);
8494                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half in 64-bit mode. */
8495                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8496                IEM_MC_ADVANCE_RIP_AND_FINISH();
8497                IEM_MC_END();
8498                break;
8499
8500            case IEMMODE_64BIT:
8501                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT);
8502                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8503                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
8504                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
8505                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
8506                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8507                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8508                IEM_MC_REF_EFLAGS(pEFlags);
8509                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8510                IEM_MC_ADVANCE_RIP_AND_FINISH();
8511                IEM_MC_END();
8512                break;
8513
8514            IEM_NOT_REACHED_DEFAULT_CASE_RET();
8515        }
8516    }
8517    else
8518    {
        /* memory: map the destination read/write, call the worker, then
           commit both the memory result and the updated eflags. */
8519        /* memory */
8520        switch (pVCpu->iem.s.enmEffOpSize)
8521        {
8522            case IEMMODE_16BIT:
8523                IEM_MC_BEGIN(3, 3, 0);
8524                IEM_MC_ARG(uint16_t *,  pu16Dst,          0);
8525                IEM_MC_ARG(uint8_t,     cShiftArg,        1);
8526                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,  2);
8527                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
8528                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);
8529
8530                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8531                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8532                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8533                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8534                IEM_MC_FETCH_EFLAGS(EFlags);
8535                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8536
8537                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
8538                IEM_MC_COMMIT_EFLAGS(EFlags);
8539                IEM_MC_ADVANCE_RIP_AND_FINISH();
8540                IEM_MC_END();
8541                break;
8542
8543            case IEMMODE_32BIT:
8544                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386);
8545                IEM_MC_ARG(uint32_t *,  pu32Dst,          0);
8546                IEM_MC_ARG(uint8_t,     cShiftArg,        1);
8547                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,  2);
8548                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
8549                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);
8550
8551                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8552                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8553                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8554                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8555                IEM_MC_FETCH_EFLAGS(EFlags);
8556                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8557
8558                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
8559                IEM_MC_COMMIT_EFLAGS(EFlags);
8560                IEM_MC_ADVANCE_RIP_AND_FINISH();
8561                IEM_MC_END();
8562                break;
8563
8564            case IEMMODE_64BIT:
8565                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT);
8566                IEM_MC_ARG(uint64_t *,  pu64Dst,          0);
8567                IEM_MC_ARG(uint8_t,     cShiftArg,        1);
8568                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,  2);
8569                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
8570                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);
8571
8572                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8573                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8574                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8575                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8576                IEM_MC_FETCH_EFLAGS(EFlags);
8577                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8578
8579                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
8580                IEM_MC_COMMIT_EFLAGS(EFlags);
8581                IEM_MC_ADVANCE_RIP_AND_FINISH();
8582                IEM_MC_END();
8583                break;
8584
8585            IEM_NOT_REACHED_DEFAULT_CASE_RET();
8586        }
8587    }
8588}
8589
8590/**
8591 * @opcode 0xd4
8592 */
/* AAM imm8 (0xd4): ASCII adjust AX after multiply; invalid in 64-bit mode.
   A zero immediate raises #DE before the C implementation is invoked. */
8593FNIEMOP_DEF(iemOp_aam_Ib)
8594{
8595    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
8596    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
8597    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8598    IEMOP_HLP_NO_64BIT();
8599    if (!bImm)
8600        IEMOP_RAISE_DIVIDE_ERROR_RET();
8601    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aam, bImm);
8602}
8603
8604
8605/**
8606 * @opcode 0xd5
8607 */
/* AAD imm8 (0xd5): ASCII adjust AX before divide; invalid in 64-bit mode.
   Unlike AAM, a zero immediate is accepted here. */
8608FNIEMOP_DEF(iemOp_aad_Ib)
8609{
8610    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
8611    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
8612    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8613    IEMOP_HLP_NO_64BIT();
8614    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aad, bImm);
8615}
8616
8617
8618/**
8619 * @opcode 0xd6
8620 */
/* SALC (0xd6, undocumented): sets AL to 0xFF if CF is set, else to 0x00;
   invalid in 64-bit mode. */
8621FNIEMOP_DEF(iemOp_salc)
8622{
8623    IEMOP_MNEMONIC(salc, "salc");
8624    IEMOP_HLP_NO_64BIT();
8625
8626    IEM_MC_BEGIN(0, 0, 0);
8627    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8628    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8629        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
8630    } IEM_MC_ELSE() {
8631        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
8632    } IEM_MC_ENDIF();
8633    IEM_MC_ADVANCE_RIP_AND_FINISH();
8634    IEM_MC_END();
8635}
8636
8637
8638/**
8639 * @opcode 0xd7
8640 */
/* XLAT (0xd7): AL = [seg:rBX + AL], using the effective address size to pick
   the 16/32/64-bit address calculation. */
8641FNIEMOP_DEF(iemOp_xlat)
8642{
8643    IEMOP_MNEMONIC(xlat, "xlat");
8644    switch (pVCpu->iem.s.enmEffAddrMode)
8645    {
8646        case IEMMODE_16BIT:
8647            IEM_MC_BEGIN(2, 0, 0);
8648            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8649            IEM_MC_LOCAL(uint8_t,  u8Tmp);
8650            IEM_MC_LOCAL(uint16_t, u16Addr);
            /* Address = BX + zero-extended AL, truncated to 16 bits. */
8651            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
8652            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
8653            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
8654            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
8655            IEM_MC_ADVANCE_RIP_AND_FINISH();
8656            IEM_MC_END();
8657            break;
8658
8659        case IEMMODE_32BIT:
8660            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386);
8661            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8662            IEM_MC_LOCAL(uint8_t,  u8Tmp);
8663            IEM_MC_LOCAL(uint32_t, u32Addr);
8664            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
8665            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
8666            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
8667            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
8668            IEM_MC_ADVANCE_RIP_AND_FINISH();
8669            IEM_MC_END();
8670            break;
8671
8672        case IEMMODE_64BIT:
8673            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT);
8674            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8675            IEM_MC_LOCAL(uint8_t,  u8Tmp);
8676            IEM_MC_LOCAL(uint64_t, u64Addr);
8677            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
8678            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
8679            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
8680            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
8681            IEM_MC_ADVANCE_RIP_AND_FINISH();
8682            IEM_MC_END();
8683            break;
8684
8685        IEM_NOT_REACHED_DEFAULT_CASE_RET();
8686    }
8687}
8688
8689
8690/**
8691 * Common worker for FPU instructions working on ST0 and STn, and storing the
8692 * result in ST0.
8693 *
8694 * @param bRm Mod R/M byte.
8695 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8696 */
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.  Raises the FPU stack-underflow exception if either register
 * is empty.
 *
 * @param   bRm         Mod R/M byte (the R/M field selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
8697FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
8698{
8699    IEM_MC_BEGIN(3, 1, 0);
8700    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8701    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
8702    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,     FpuRes, 0);
8703    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
8704    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,          2);
8705
8706    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8707    IEM_MC_MAYBE_RAISE_FPU_XCPT();
8708    IEM_MC_PREPARE_FPU_USAGE();
    /* Only call the worker when both ST0 and STn hold values. */
8709    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8710        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
8711        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
8712    } IEM_MC_ELSE() {
8713        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
8714    } IEM_MC_ENDIF();
8715    IEM_MC_ADVANCE_RIP_AND_FINISH();
8716
8717    IEM_MC_END();
8718}
8719
8720
8721/**
8722 * Common worker for FPU instructions working on ST0 and STn, and only affecting
8723 * flags.
8724 *
8725 * @param bRm Mod R/M byte.
8726 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8727 */
/**
 * Common worker for FPU instructions working on ST0 and STn, and only
 * affecting flags (FSW); no FPU register is written.
 *
 * @param   bRm         Mod R/M byte (the R/M field selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
8728FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8729{
8730    IEM_MC_BEGIN(3, 1, 0);
8731    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8732    IEM_MC_LOCAL(uint16_t,          u16Fsw);
8733    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw,    u16Fsw, 0);
8734    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value1,         1);
8735    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value2,         2);
8736
8737    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8738    IEM_MC_MAYBE_RAISE_FPU_XCPT();
8739    IEM_MC_PREPARE_FPU_USAGE();
    /* Only call the worker when both ST0 and STn hold values. */
8740    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8741        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8742        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
8743    } IEM_MC_ELSE() {
8744        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
8745    } IEM_MC_ENDIF();
8746    IEM_MC_ADVANCE_RIP_AND_FINISH();
8747
8748    IEM_MC_END();
8749}
8750
8751
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping when done.
 *
 * Identical to iemOpHlpFpuNoStore_st0_stN except the stack is popped after the
 * FSW update (both on the normal and the underflow path).
 *
 * @param   bRm         Mod R/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8781
8782
/** Opcode 0xd8 11/0.  FADD ST(0),ST(i) — result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
8789
8790
/** Opcode 0xd8 11/1.  FMUL ST(0),ST(i) — result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
8797
8798
/** Opcode 0xd8 11/2.  FCOM ST(0),ST(i) — only updates FSW condition flags. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
8805
8806
/** Opcode 0xd8 11/3.  FCOMP ST(0),ST(i) — FSW flags only, then pops the stack.
 *  Shares the compare implementation with FCOM. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
8813
8814
/** Opcode 0xd8 11/4.  FSUB ST(0),ST(i) — result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
8821
8822
/** Opcode 0xd8 11/5.  FSUBR ST(0),ST(i) — reversed subtract, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
8829
8830
/** Opcode 0xd8 11/6.  FDIV ST(0),ST(i) — result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
8837
8838
/** Opcode 0xd8 11/7.  FDIVR ST(0),ST(i) — reversed divide, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
8845
8846
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * Decodes the effective address first, fetches the 32-bit real from memory,
 * then calls the implementation with ST0 and the fetched value.  Signals
 * stack underflow in FSW if ST0 is empty.
 *
 * @param   bRm         Mod R/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8882
8883
/** Opcode 0xd8 !11/0.  FADD ST(0),m32real — result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
8890
8891
/** Opcode 0xd8 !11/1.  FMUL ST(0),m32real — result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
8898
8899
/** Opcode 0xd8 !11/2.  FCOM ST(0),m32real — compares ST(0) against a 32-bit
 *  real from memory; only FSW flags are updated, no register is written. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8931
8932
/** Opcode 0xd8 !11/3.  FCOMP ST(0),m32real — same compare as FCOM m32r but
 *  pops the stack after updating FSW. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8964
8965
/** Opcode 0xd8 !11/4.  FSUB ST(0),m32real — result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
8972
8973
/** Opcode 0xd8 !11/5.  FSUBR ST(0),m32real — reversed subtract, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
8980
8981
/** Opcode 0xd8 !11/6.  FDIV ST(0),m32real — result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
8988
8989
/** Opcode 0xd8 !11/7.  FDIVR ST(0),m32real — reversed divide, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
8996
8997
/**
 * @opcode 0xd8
 *
 * First FPU escape opcode.  Dispatches on the ModR/M reg field to either the
 * register form (mod == 3: ST0 op STn) or the memory form (ST0 op m32real).
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    /* Stash the 11-bit FPU opcode (ModR/M + low 3 bits of the escape byte)
       in uFpuOpcode; the handlers pass it along for FOP/FPUIP updates. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9037
9038
/** Opcode 0xd9 /0 mem32real
 * FLD m32real — converts the 32-bit real to 80-bit and pushes it.  Pushing
 * requires ST7 (the register below TOP) to be free; otherwise a stack
 * push-overflow is signalled.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9069
9070
/** Opcode 0xd9 !11/2 mem32real
 * FST m32real — stores ST(0) to memory as a 32-bit real.  On an empty ST(0)
 * with the invalid-op mask (FCW.IM) set, a negative QNaN is written instead;
 * underflow is reported via FSW either way. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9104
9105
/** Opcode 0xd9 !11/3
 * FSTP m32real — same as FST m32real but pops the stack after the store /
 * underflow handling. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): write a negative QNaN if IM is masked, then report underflow + pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9139
9140
9141/** Opcode 0xd9 !11/4 */
9142FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
9143{
9144 IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
9145 IEM_MC_BEGIN(3, 0, 0);
9146 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
9147 IEM_MC_ARG(uint8_t, iEffSeg, 1);
9148 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
9149 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9150 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9151 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9152 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9153 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9154 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
9155 IEM_MC_END();
9156}
9157
9158
9159/** Opcode 0xd9 !11/5 */
9160FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
9161{
9162 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
9163 IEM_MC_BEGIN(1, 1, 0);
9164 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9165 IEM_MC_ARG(uint16_t, u16Fsw, 0);
9166 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9168 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9169 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9170 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9171 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, iemCImpl_fldcw, u16Fsw);
9172 IEM_MC_END();
9173}
9174
9175
9176/** Opcode 0xd9 !11/6 */
9177FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
9178{
9179 IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
9180 IEM_MC_BEGIN(3, 0, 0);
9181 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
9182 IEM_MC_ARG(uint8_t, iEffSeg, 1);
9183 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
9184 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9186 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9187 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9188 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9189 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
9190 IEM_MC_END();
9191}
9192
9193
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte — stores the FPU control word to memory (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9210
9211
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP — does nothing except update the FPU opcode/IP bookkeeping. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9227
9228
/** Opcode 0xd9 11/0 stN
 * FLD ST(i) — pushes a copy of ST(i) onto the stack.  Push underflow is
 * signalled when ST(i) is empty. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9253
9254
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i) — exchanges ST(0) and ST(i).  The non-empty path swaps the two
 * registers inline (clearing C1 via the FSW mask in the result); the
 * underflow path is delegated to iemCImpl_fxch_underflow. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(2, 3, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9283
9284
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i) — copies ST(0) to ST(i) and pops.  The iDstReg == 0 special case
 * avoids the copy entirely since source and destination coincide. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        /* Destination is ST(0) itself: just pop, reporting underflow if empty. */
        IEM_MC_BEGIN(0, 1, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* General case: store ST(0) into ST(i), then pop. */
        IEM_MC_BEGIN(0, 2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9331
9332
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * Signals stack underflow via FSW if ST0 is empty.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9360
9361
/** Opcode 0xd9 0xe0.  FCHS — negates the sign of ST(0). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
9368
9369
/** Opcode 0xd9 0xe1.  FABS — absolute value of ST(0). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
9376
9377
/** Opcode 0xd9 0xe4.
 * FTST — compares ST(0) with 0.0, updating only FSW condition flags. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9401
9402
/** Opcode 0xd9 0xe5.
 * FXAM — classifies ST(0) into the FSW C0-C3 flags.  Unlike the other unary
 * ops this references the register unconditionally (no NOT_EMPTY check),
 * since FXAM also classifies the empty case. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9423
9424
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * Pushing requires the register below TOP (ST7) to be free; otherwise a
 * stack push-overflow is signalled via FSW.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEM_MC_BEGIN(1, 1, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9450
9451
/** Opcode 0xd9 0xe8.  FLD1 — pushes +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
9458
9459
/** Opcode 0xd9 0xe9.  FLDL2T — pushes log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
9466
9467
/** Opcode 0xd9 0xea.  FLDL2E — pushes log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
9474
/** Opcode 0xd9 0xeb.  FLDPI — pushes pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
9481
9482
/** Opcode 0xd9 0xec.  FLDLG2 — pushes log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
9489
/** Opcode 0xd9 0xed.  FLDLN2 — pushes ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
9496
9497
/** Opcode 0xd9 0xee.  FLDZ — pushes +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
9504
9505
/** Opcode 0xd9 0xf0.
 *
 * F2XM1 — computes 2^ST(0) - 1, replacing ST(0).
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
9519
9520
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * Note the operand order is reversed relative to iemOpHlpFpu_st0_stN: STn is
 * the first (destination) operand here.  Callers also pass a literal register
 * number (e.g. 1) as @a bRm for the fixed ST1 forms.
 *
 * @param   bRm         Mod R/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9551
9552
/** Opcode 0xd9 0xf1.  FYL2X — ST(1) := ST(1) * log2(ST(0)), then pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
9559
9560
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * Signals push-underflow (both outputs) when ST0 is empty.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9588
9589
/** Opcode 0xd9 0xf2.  FPTAN — replaces ST(0) and pushes a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
9596
9597
/** Opcode 0xd9 0xf3.  FPATAN — result stored in ST(1), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
9604
9605
/** Opcode 0xd9 0xf4.  FXTRACT — replaces ST(0) and pushes a second result. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
9612
9613
/** Opcode 0xd9 0xf5.  FPREM1 — IEEE remainder; ST(1) is selected by passing
 *  a literal 1 as the bRm argument of the common worker. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
9620
9621
/** Opcode 0xd9 0xf6.
 * FDECSTP — decrements the FPU TOP pointer without touching the tag word. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9642
9643
/** Opcode 0xd9 0xf7.
 * FINCSTP — increments the FPU TOP pointer without touching the tag word. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9664
9665
/** Opcode 0xd9 0xf8.  FPREM — partial remainder, result in ST(0). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
9672
9673
/** Opcode 0xd9 0xf9.  FYL2XP1 — result stored in ST(1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
9680
9681
/** Opcode 0xd9 0xfa.  FSQRT — square root of ST(0). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
9688
9689
/** Opcode 0xd9 0xfb - fsincos st0.
 * Replaces ST0 and pushes a second result, hence the replace+push worker. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
9696
9697
/** Opcode 0xd9 0xfc - frndint st0.
 * Dispatches to the common ST0-only worker. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
9704
9705
/** Opcode 0xd9 0xfd - fscale st0,st1.
 * Dispatches to the common ST0/ST(i) worker with ST1 as the second operand. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
9712
9713
/** Opcode 0xd9 0xfe - fsin st0.
 * Dispatches to the common ST0-only worker. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
9720
9721
/** Opcode 0xd9 0xff - fcos st0.
 * Dispatches to the common ST0-only worker. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
9728
9729
/** Used by iemOp_EscF1.
 * Dispatch table for the 0xd9 escape with register-mode ModR/M bytes
 * 0xe0..0xff; indexed by (bRm - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
9766
9767
9768/**
9769 * @opcode 0xd9
9770 */
9771FNIEMOP_DEF(iemOp_EscF1)
9772{
9773 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9774 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
9775
9776 if (IEM_IS_MODRM_REG_MODE(bRm))
9777 {
9778 switch (IEM_GET_MODRM_REG_8(bRm))
9779 {
9780 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
9781 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
9782 case 2:
9783 if (bRm == 0xd0)
9784 return FNIEMOP_CALL(iemOp_fnop);
9785 IEMOP_RAISE_INVALID_OPCODE_RET();
9786 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
9787 case 4:
9788 case 5:
9789 case 6:
9790 case 7:
9791 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
9792 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
9793 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9794 }
9795 }
9796 else
9797 {
9798 switch (IEM_GET_MODRM_REG_8(bRm))
9799 {
9800 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
9801 case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
9802 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
9803 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
9804 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
9805 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
9806 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
9807 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
9808 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9809 }
9810 }
9811}
9812
9813
/** Opcode 0xda 11/0 - fcmovb st0,stN.
 * Copies ST(i) into ST0 when EFLAGS.CF is set; signals stack underflow if
 * either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9838
9839
/** Opcode 0xda 11/1 - fcmove st0,stN.
 * Copies ST(i) into ST0 when EFLAGS.ZF is set; signals stack underflow if
 * either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9864
9865
/** Opcode 0xda 11/2 - fcmovbe st0,stN.
 * Copies ST(i) into ST0 when CF or ZF is set (below-or-equal); signals stack
 * underflow if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9890
9891
/** Opcode 0xda 11/3 - fcmovu st0,stN.
 * Copies ST(i) into ST0 when EFLAGS.PF is set (unordered result of a prior
 * FCOMI-style compare); signals stack underflow if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9916
9917
9918/**
9919 * Common worker for FPU instructions working on ST0 and ST1, only affecting
9920 * flags, and popping twice when done.
9921 *
9922 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9923 */
9924FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
9925{
9926 IEM_MC_BEGIN(3, 1, 0);
9927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9928 IEM_MC_LOCAL(uint16_t, u16Fsw);
9929 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9930 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9931 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9932
9933 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9934 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9935
9936 IEM_MC_PREPARE_FPU_USAGE();
9937 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
9938 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
9939 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
9940 } IEM_MC_ELSE() {
9941 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
9942 } IEM_MC_ENDIF();
9943 IEM_MC_ADVANCE_RIP_AND_FINISH();
9944
9945 IEM_MC_END();
9946}
9947
9948
/** Opcode 0xda 0xe9 - fucompp.
 * Unordered compare of ST0 with ST1, popping both; flags-only worker. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
9955
9956
9957/**
9958 * Common worker for FPU instructions working on ST0 and an m32i, and storing
9959 * the result in ST0.
9960 *
9961 * @param bRm Mod R/M byte.
9962 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9963 */
9964FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
9965{
9966 IEM_MC_BEGIN(3, 3, 0);
9967 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9968 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9969 IEM_MC_LOCAL(int32_t, i32Val2);
9970 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9971 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9972 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
9973
9974 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9976
9977 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9978 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9979 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9980
9981 IEM_MC_PREPARE_FPU_USAGE();
9982 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
9983 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
9984 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9985 } IEM_MC_ELSE() {
9986 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9987 } IEM_MC_ENDIF();
9988 IEM_MC_ADVANCE_RIP_AND_FINISH();
9989
9990 IEM_MC_END();
9991}
9992
9993
/** Opcode 0xda !11/0 - fiadd m32i.
 * ST0 += (int32 from memory); uses the common ST0/m32i worker. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
10000
10001
/** Opcode 0xda !11/1 - fimul m32i.
 * ST0 *= (int32 from memory); uses the common ST0/m32i worker. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
10008
10009
/** Opcode 0xda !11/2 - ficom st0,m32i.
 * Compares ST0 with a 32-bit signed integer from memory; updates FSW only,
 * does not pop. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10041
10042
/** Opcode 0xda !11/3 - ficomp st0,m32i.
 * Same as ficom m32i but pops ST0 afterwards (note the *_THEN_POP FSW and
 * underflow variants). */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10074
10075
/** Opcode 0xda !11/4 - fisub m32i.
 * ST0 -= (int32 from memory); uses the common ST0/m32i worker. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
10082
10083
/** Opcode 0xda !11/5 - fisubr m32i.
 * Reverse subtract: ST0 = (int32 from memory) - ST0; common worker. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
10090
10091
/** Opcode 0xda !11/6 - fidiv m32i.
 * ST0 /= (int32 from memory); uses the common ST0/m32i worker. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
10098
10099
/** Opcode 0xda !11/7 - fidivr m32i.
 * Reverse divide: ST0 = (int32 from memory) / ST0; common worker. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
10106
10107
10108/**
10109 * @opcode 0xda
10110 */
10111FNIEMOP_DEF(iemOp_EscF2)
10112{
10113 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10114 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
10115 if (IEM_IS_MODRM_REG_MODE(bRm))
10116 {
10117 switch (IEM_GET_MODRM_REG_8(bRm))
10118 {
10119 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
10120 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
10121 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
10122 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
10123 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
10124 case 5:
10125 if (bRm == 0xe9)
10126 return FNIEMOP_CALL(iemOp_fucompp);
10127 IEMOP_RAISE_INVALID_OPCODE_RET();
10128 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10129 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
10130 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10131 }
10132 }
10133 else
10134 {
10135 switch (IEM_GET_MODRM_REG_8(bRm))
10136 {
10137 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
10138 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
10139 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
10140 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
10141 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
10142 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
10143 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
10144 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
10145 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10146 }
10147 }
10148}
10149
10150
/** Opcode 0xdb !11/0 - fild m32i.
 * Loads a 32-bit signed integer from memory and pushes it onto the FPU
 * stack as an 80-bit real; signals push overflow when ST7 is occupied. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) must be free for the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10181
10182
/** Opcode 0xdb !11/1 - fisttp m32i.
 * Stores ST0 to memory as int32 with truncation and pops; on an empty ST0,
 * writes the integer-indefinite value if FCW.IM masks the fault, then still
 * pops via the underflow path. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10216
10217
/** Opcode 0xdb !11/2 - fist m32i.
 * Stores ST0 to memory as int32 (rounded per FCW.RC), no pop; on an empty
 * ST0, writes integer indefinite when FCW.IM is set. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10251
10252
/** Opcode 0xdb !11/3 - fistp m32i.
 * Same as fist m32i but pops ST0 afterwards (note the *_THEN_POP variants). */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10286
10287
/** Opcode 0xdb !11/5 - fld m80r.
 * Pushes an 80-bit real loaded from memory; signals push overflow when ST7
 * is occupied. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) must be free for the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10318
10319
/** Opcode 0xdb !11/7 - fstp m80r.
 * Stores ST0 to memory as an 80-bit real and pops; on an empty ST0, writes
 * the negative QNaN (real indefinite) when FCW.IM is set, then still pops
 * via the underflow path. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* 10-byte store: explicit mapping with a 7-byte alignment mask. */
    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10353
10354
/** Opcode 0xdb 11/0 - fcmovnb st0,stN.
 * Copies ST(i) into ST0 when EFLAGS.CF is clear; signals stack underflow if
 * either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10379
10380
/** Opcode 0xdb 11/1 - fcmovne st0,stN.
 * Copies ST(i) into ST0 when EFLAGS.ZF is clear; signals stack underflow if
 * either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10405
10406
/** Opcode 0xdb 11/2 - fcmovnbe st0,stN.
 * Copies ST(i) into ST0 when both CF and ZF are clear (not below-or-equal);
 * signals stack underflow if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10431
10432
/** Opcode 0xdb 11/3 - fcmovnu st0,stN (Intel mnemonic FCMOVNU; the local
 * identifier spells it "fcmovnnu").
 * Copies ST(i) into ST0 when EFLAGS.PF is clear (not unordered); signals
 * stack underflow if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEM_MC_BEGIN(0, 1, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10457
10458
/** Opcode 0xdb 0xe0 - fneni.
 * 8087 interrupt-enable instruction; treated as a NOP on later CPUs (only
 * the device-not-available check is performed here). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10469
10470
/** Opcode 0xdb 0xe1 - fndisi.
 * 8087 interrupt-disable instruction; treated as a NOP on later CPUs (only
 * the device-not-available check is performed here). */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10481
10482
/** Opcode 0xdb 0xe2 - fnclex.
 * Clears the FSW exception bits without checking for pending exceptions
 * (the no-wait form). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10495
10496
/** Opcode 0xdb 0xe3 - fninit.
 * Reinitializes the FPU; defers to the C implementation with exception
 * checking disabled (the no-wait form). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, iemCImpl_finit, false /*fCheckXcpts*/);
}
10504
10505
/** Opcode 0xdb 0xe4 - fnsetpm.
 * 80287 "set protected mode"; ignored (NOP) on later CPUs. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10516
10517
/** Opcode 0xdb 0xe5 - frstpm.
 * 80287XL "reset protected mode"; current behavior is to raise \#UD like
 * newer CPUs do (the NOP variant is kept disabled below). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
10533
10534
10535/** Opcode 0xdb 11/5. */
10536FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
10537{
10538 IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
10539 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
10540 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fucomi_r80_by_r80,
10541 0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
10542}
10543
10544
/** Opcode 0xdb 11/6 - fcomi st0,stN.
 * Ordered compare of ST0 with ST(i) setting EFLAGS; defers to the common
 * C implementation shared with fucomi, no pop.  The low bits of the third
 * argument carry the FPU opcode, the fPop flag is OR'ed in on top. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
                                false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
10553
10554
10555/**
10556 * @opcode 0xdb
10557 */
10558FNIEMOP_DEF(iemOp_EscF3)
10559{
10560 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10561 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
10562 if (IEM_IS_MODRM_REG_MODE(bRm))
10563 {
10564 switch (IEM_GET_MODRM_REG_8(bRm))
10565 {
10566 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
10567 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
10568 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
10569 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
10570 case 4:
10571 switch (bRm)
10572 {
10573 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
10574 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
10575 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
10576 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
10577 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
10578 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
10579 case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
10580 case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
10581 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10582 }
10583 break;
10584 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
10585 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
10586 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
10587 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10588 }
10589 }
10590 else
10591 {
10592 switch (IEM_GET_MODRM_REG_8(bRm))
10593 {
10594 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
10595 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
10596 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
10597 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
10598 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
10599 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
10600 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10601 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
10602 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10603 }
10604 }
10605}
10606
10607
10608/**
10609 * Common worker for FPU instructions working on STn and ST0, and storing the
10610 * result in STn unless IE, DE or ZE was raised.
10611 *
10612 * @param bRm Mod R/M byte.
10613 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10614 */
10615FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
10616{
10617 IEM_MC_BEGIN(3, 1, 0);
10618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10619 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10620 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10621 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10622 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10623
10624 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10625 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10626
10627 IEM_MC_PREPARE_FPU_USAGE();
10628 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
10629 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
10630 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10631 } IEM_MC_ELSE() {
10632 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10633 } IEM_MC_ENDIF();
10634 IEM_MC_ADVANCE_RIP_AND_FINISH();
10635
10636 IEM_MC_END();
10637}
10638
10639
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST(0) - adds ST0 to ST(i), result stored in ST(i) by the
 * common worker. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
10646
10647
/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST(0) - multiplies ST(i) by ST0, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
10654
10655
/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST(0) - reversed subtract, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
10662
10663
/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST(0) - subtracts ST0 from ST(i), result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
10670
10671
/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST(0) - reversed divide, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
10678
10679
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST(0) - divides ST(i) by ST0, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
10686
10687
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * Used by the 0xdc !11/x arithmetic forms.  The effective address is
 * calculated and decoding completed before the availability/exception
 * checks; the m64 value is fetched before the FPU stack is inspected.
 *
 * @param   bRm         Mod R/M byte (memory form).
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* ST0 (register 0) is both first operand and destination. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10722
10723
/** Opcode 0xdc !11/0.
 * FADD m64real - adds the 64-bit memory operand to ST0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
10730
10731
/** Opcode 0xdc !11/1.
 * FMUL m64real - multiplies ST0 by the 64-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
10738
10739
/** Opcode 0xdc !11/2.
 * FCOM ST0,m64real - compares ST0 with the 64-bit memory operand; only
 * FSW is updated, the stack is left untouched. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX = no stack register to mark in the underflow handling. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10771
10772
/** Opcode 0xdc !11/3.
 * FCOMP ST0,m64real - like FCOM m64r (same assembly worker) but pops the
 * stack afterwards via the _THEN_POP FSW/underflow variants. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10804
10805
/** Opcode 0xdc !11/4.
 * FSUB m64real - subtracts the 64-bit memory operand from ST0. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
10812
10813
/** Opcode 0xdc !11/5.
 * FSUBR m64real - reversed subtract with a 64-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
10820
10821
/** Opcode 0xdc !11/6.
 * FDIV m64real - divides ST0 by the 64-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
10828
10829
/** Opcode 0xdc !11/7.
 * FDIVR m64real - reversed divide with a 64-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
10836
10837
/**
 * @opcode 0xdc
 *
 * Escape group 4 dispatcher.  Register forms (mod == 3) operate on
 * ST(i),ST(0); memory forms take an m64real operand.  Note the sub/div
 * encodings are reversed relative to 0xd8 in the register forms
 * (reg 4 = FSUBR, 5 = FSUB, 6 = FDIVR, 7 = FDIV).
 */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (low 3 bits of 0xdc + modrm) for FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10876
10877
/** Opcode 0xdd !11/0.
 * FLD m64real - converts the 64-bit memory operand to 80-bit and pushes it
 * onto the FPU stack; takes the push-overflow path if ST7 is occupied.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 relative to the current top is the slot the push lands in. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10908
10909
/** Opcode 0xdd !11/1.
 * FISTTP m64int - stores ST0 to memory as a 64-bit integer using truncation
 * and pops the stack.  On a masked-IM underflow the integer indefinite
 * (INT64_MIN) is written instead.
 * Note: the original comment said !11/0, but this is dispatched as reg 1. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before consulting the stack so address faults
       are raised ahead of any FPU state changes. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10943
10944
/** Opcode 0xdd !11/2.
 * FST m64real - stores ST0 to memory as a 64-bit real without popping.
 * On a masked-IM underflow a negative QNaN is written instead.
 * Note: the original comment said !11/0, but this is dispatched as reg 2. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10978
10979
10980
10981
/** Opcode 0xdd !11/3.
 * FSTP m64real - like FST m64r (same assembly worker) but pops the stack
 * afterwards via the _THEN_POP FSW/underflow variants.
 * Note: the original comment said !11/0, but this is dispatched as reg 3. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11015
11016
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte - restores the full FPU state from memory; the heavy
 * lifting is deferred to iemCImpl_frstor.
 * Note: the original comment said !11/0, but this is dispatched as reg 4. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0); /* 94 vs 108 byte image */
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
11033
11034
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte - saves the full FPU state to memory (no pending
 * exception check) via iemCImpl_fnsave.
 * Note: the original comment said !11/0, but this is dispatched as reg 6. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
11051
/** Opcode 0xdd !11/7.
 * FNSTSW m16 - stores the FPU status word to memory (no pending exception
 * check).
 * Note: the original comment said !11/0, but this is dispatched as reg 7. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2, 0);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
11075
11076
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - tags the selected stack register as empty without altering
 * the top-of-stack pointer. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
       unmodified. */
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11096
11097
/** Opcode 0xdd 11/2.
 * FST ST(i) - copies ST0 into ST(i) (FSW result bits zeroed); underflow
 * path if ST0 is empty.
 * Note: the original comment said 11/1, but this is dispatched as reg 2. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11120
11121
/** Opcode 0xdd 11/4.
 * FUCOM ST(i) - unordered compare of ST0 with ST(i), no stack change.
 * Note: the original comment said 11/3, but this is dispatched as reg 4. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
11128
11129
/** Opcode 0xdd 11/5.
 * FUCOMP ST(i) - unordered compare of ST0 with ST(i), then pop.
 * Note: the original comment said 11/4, but this is dispatched as reg 5. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
11136
11137
/**
 * @opcode 0xdd
 *
 * Escape group 5 dispatcher.  Register forms: FFREE/FXCH(reserved)/FST/
 * FSTP/FUCOM/FUCOMP; memory forms: m64real loads/stores, FISTTP m64i,
 * FRSTOR, FNSAVE and FNSTSW m16.
 */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (low 3 bits of 0xdd + modrm) for FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
            case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11176
11177
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0) - adds ST0 to ST(i), stores in ST(i), then pops. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
11184
11185
/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0) - multiplies ST(i) by ST0, stores in ST(i), then pops.
 * Note: the original comment said 11/0, but this is dispatched as reg 1. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
11192
11193
/** Opcode 0xde 0xd9.
 * FCOMPP - compares ST0 with ST1 and pops both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
11200
11201
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0) - reversed subtract into ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
11208
11209
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0) - subtracts ST0 from ST(i), stores in ST(i), then pops. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
11216
11217
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0) - reversed divide into ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
11224
11225
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0) - divides ST(i) by ST0, stores in ST(i), then pops. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
11232
11233
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * Used by the 0xde !11/x integer arithmetic forms (FIADD/FIMUL/FISUB/
 * FISUBR/FIDIV/FIDIVR m16int).  The 16-bit integer is fetched before the
 * stack check; on empty ST0 the underflow path runs instead of the worker.
 *
 * @param   bRm         Mod R/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11269
11270
/** Opcode 0xde !11/0.
 * FIADD m16int - adds a 16-bit integer memory operand to ST0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
11277
11278
/** Opcode 0xde !11/1.
 * FIMUL m16int - multiplies ST0 by a 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
11285
11286
/** Opcode 0xde !11/2.
 * FICOM ST0,m16int - compares ST0 with a 16-bit integer memory operand;
 * only FSW is updated, the stack is left untouched. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11318
11319
/** Opcode 0xde !11/3.
 * FICOMP ST0,m16int - like FICOM m16i (same assembly worker) but pops the
 * stack afterwards via the _THEN_POP FSW/underflow variants. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11351
11352
/** Opcode 0xde !11/4.
 * FISUB m16int - subtracts a 16-bit integer memory operand from ST0. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
11359
11360
/** Opcode 0xde !11/5.
 * FISUBR m16int - reversed subtract with a 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
11367
11368
/** Opcode 0xde !11/6.
 * FIDIV m16int - divides ST0 by a 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
11375
11376
/** Opcode 0xde !11/7.
 * FIDIVR m16int - reversed divide with a 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
11383
11384
/**
 * @opcode 0xde
 *
 * Escape group 6 dispatcher.  Register forms are the popping arithmetic
 * variants (FADDP .. FDIVP); reg 3 is valid only as 0xde 0xd9 (FCOMPP).
 * Memory forms take an m16int operand.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (low 3 bits of 0xde + modrm) for FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11425
11426
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp: tags
 * ST(i) empty, then increments the top-of-stack pointer. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP(); /* the 'p' part: pop without storing */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11446
11447
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - copies the FPU status word into AX (no pending exception
 * check). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11462
11463
11464/** Opcode 0xdf 11/5. */
11465FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
11466{
11467 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
11468 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
11469 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
11470 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
11471}
11472
11473
/** Opcode 0xdf 11/6.
 * FCOMIP ST0,ST(i) - ordered compare of ST0 with ST(i) setting EFLAGS,
 * then pop (fPop bit 31 set in the combined argument); deferred to
 * iemCImpl_fcomi_fucomi. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11482
11483
/** Opcode 0xdf !11/0.
 * FILD m16int - converts a 16-bit integer memory operand to 80-bit real
 * and pushes it; push-overflow path if ST7 is occupied. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 relative to the current top is the slot the push lands in. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11514
11515
/** Opcode 0xdf !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    /* SSE3 FISTTP: store ST(0) to memory as int16 with truncation (round
       toward zero regardless of FCW.RC), then pop the FPU stack. */
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,               pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before touching the FPU stack so memory faults are raised first. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11549
11550
/** Opcode 0xdf !11/2. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    /* Store ST(0) to memory as int16 using the current FCW rounding mode;
       the FPU stack is left unchanged (no pop, cf. iemOp_fistp_m16i). */
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,               pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before touching the FPU stack so memory faults are raised first. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11584
11585
/** Opcode 0xdf !11/3. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    /* Store ST(0) to memory as int16 using the current FCW rounding mode,
       then pop the FPU stack (fist + pop variant). */
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,               pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before touching the FPU stack so memory faults are raised first. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11619
11620
/** Opcode 0xdf !11/4. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    /* Loads an 80-bit packed BCD value from memory, converts it to an
       80-bit real and pushes it onto the FPU register stack. */
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTPBCD80U,             d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U,   pd80Val,    d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) (i.e. the new top after the push) must be free, else #IS stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11651
11652
/** Opcode 0xdf !11/5. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    /* Loads a 64-bit signed integer from memory, converts it to an 80-bit
       real and pushes it onto the FPU register stack. */
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int64_t,               i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val,  i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) (i.e. the new top after the push) must be free, else #IS stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11683
11684
/** Opcode 0xdf !11/6. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    /* Converts ST(0) to an 80-bit packed BCD value, stores it to memory and
       pops the FPU stack. */
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 2, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTPBCD80U,              pd80Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the 10-byte destination before touching the FPU stack so memory faults come first. */
    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, store the packed BCD indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11718
11719
/** Opcode 0xdf !11/7. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    /* Store ST(0) to memory as int64 using the current FCW rounding mode,
       then pop the FPU stack. */
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,               pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before touching the FPU stack so memory faults are raised first. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11753
11754
/**
 * @opcode 0xdf
 *
 * FPU escape 0xdf dispatcher: splits on register vs. memory ModR/M form and
 * forwards to the corresponding instruction decoder.
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of the opcode byte + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0)                            /* fnstsw is only valid for the 0xe0 encoding. */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i,   bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i,  bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i,   bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11795
11796
/**
 * @opcode 0xe0
 *
 * LOOPNE/LOOPNZ: decrement xCX (width per effective address size) and take
 * the relative jump when the counter is non-zero AND ZF is clear.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address size prefix selects which counter register (CX/ECX/RCX) is used. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11847
11848
/**
 * @opcode 0xe1
 *
 * LOOPE/LOOPZ: decrement xCX (width per effective address size) and take
 * the relative jump when the counter is non-zero AND ZF is set.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address size prefix selects which counter register (CX/ECX/RCX) is used. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11899
11900
/**
 * @opcode 0xe2
 *
 * LOOP: decrement xCX (width per effective address size) and take the
 * relative jump while the counter is non-zero.  Includes a logging-only
 * shortcut for tight 'loop $-2' delay loops.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */

    /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
     * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs.  Shortcutting
     * the loop causes guest crashes, but when logging it's nice to skip a few million
     * lines of useless output. */
#if defined(LOG_ENABLED)
    /* Only with verbose logging and for a self-branching LOOP: zero the counter
       and fall through immediately instead of iterating. */
    if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
#endif

    /* Regular path: decrement the counter and branch while non-zero. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11991
11992
/**
 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ: branch when the counter register (width per effective
 * address size) is zero; the counter is not modified.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Note the inverted condition relative to LOOP: jump taken when the counter IS zero. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12040
12041
/** Opcode 0xe4 */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    /* Reads one byte from the immediate-addressed I/O port into AL. */
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12051
12052
/** Opcode 0xe5 */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    /* Reads a word/dword (per effective operand size) from the
       immediate-addressed I/O port into AX/EAX. */
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12063
12064
/** Opcode 0xe6 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    /* Writes AL to the immediate-addressed I/O port. */
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12074
12075
/** Opcode 0xe7 */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    /* Writes AX/EAX (per effective operand size) to the immediate-addressed I/O port. */
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12086
12087
/**
 * @opcode 0xe8
 *
 * Near relative CALL with a 16/32-bit displacement (sign-extended to 64-bit
 * in long mode); defers to the C implementation per operand size.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* The displacement stays 32-bit in 64-bit mode; sign-extended while fetching. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12118
12119
/**
 * @opcode 0xe9
 *
 * Near relative JMP with a 16/32-bit displacement; 64-bit mode uses the
 * 32-bit displacement form (sign-extended by the branch helper).
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0);
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12149
12150
/**
 * @opcode 0xea
 *
 * Direct far JMP (ptr16:16 / ptr16:32).  Invalid in 64-bit mode; the decoded
 * selector:offset pair is handed to the far-jump C implementation.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                                iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
}
12171
12172
/**
 * @opcode 0xeb
 *
 * Short relative JMP with an 8-bit signed displacement.
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    IEM_MC_END();
}
12187
12188
/** Opcode 0xec */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    /* Reads one byte from the I/O port addressed by DX into AL. */
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12197
12198
/** Opcode 0xed */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    /* Reads a word/dword (per effective operand size) from the I/O port
       addressed by DX into AX/EAX. */
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12208
12209
/** Opcode 0xee */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    /* Writes AL to the I/O port addressed by DX. */
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12218
12219
/** Opcode 0xef */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    /* Writes AX/EAX (per effective operand size) to the I/O port addressed by DX. */
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12229
12230
/**
 * @opcode 0xf0
 *
 * LOCK prefix: records the prefix (unless lock checking is being
 * disregarded) and continues decoding with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12243
12244
/**
 * @opcode 0xf1
 *
 * INT1/ICEBP: raises \#DB via the generic software-interrupt implementation.
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    /** @todo Does not generate \#UD on 286, or so they say...  Was allegedly a
     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
     * LOADALL memo.  Needs some testing. */
    IEMOP_HLP_MIN_386();
    /** @todo testcase! */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
}
12260
12261
/**
 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix: records the prefix and continues decoding with the
 * next opcode byte.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12279
12280
/**
 * @opcode 0xf3
 *
 * REPE/REPZ prefix: records the prefix and continues decoding with the
 * next opcode byte.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12298
12299
/**
 * @opcode 0xf4
 *
 * HLT: defers to the C implementation, ending the current translation block.
 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_MNEMONIC(hlt, "hlt");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, iemCImpl_hlt);
}
12309
12310
/**
 * @opcode 0xf5
 *
 * CMC: complements (toggles) the carry flag; no other flags affected.
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12323
12324
/**
 * Body of 'inc/dec/not/neg Eb'.
 *
 * Handles both the register and memory forms of a unary byte-sized
 * read-modify-write operation.  The memory form selects the plain or the
 * locked (atomic) assembly worker depending on the LOCK prefix; both paths
 * map the destination, apply the worker and commit the result plus EFLAGS.
 */
#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        /* register access */ \
        IEM_MC_BEGIN(2, 0, 0); \
        IEMOP_HLP_DONE_DECODING(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst, 0); \
        IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(2, 2, 0); \
            IEM_MC_ARG(uint8_t *,           pu8Dst,          0); \
            IEM_MC_ARG_LOCAL_EFLAGS(        pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR,           GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,           bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(2, 2, 0); \
            IEM_MC_ARG(uint8_t *,           pu8Dst,          0); \
            IEM_MC_ARG_LOCAL_EFLAGS(        pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR,           GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,           bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
12385
12386
/**
 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
 *
 * Emits the register-target path and the non-LOCK memory-target path.
 * NOTE: The expansion deliberately ends inside an open 'else' block (see the
 * trailing '(void)0'); the caller must immediately follow this macro with
 * IEMOP_BODY_UNARY_Ev_LOCKED, which supplies the LOCK-prefixed memory path
 * and closes the braces.  Expects a local 'bRm' (the ModR/M byte) in scope.
 *
 * @param   a_fnNormalU16   Worker for 16-bit operand size.
 * @param   a_fnNormalU32   Worker for 32-bit operand size (register path also
 *                          clears the high dword of the 64-bit register).
 * @param   a_fnNormalU64   Worker for 64-bit operand size.
 */
#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(2, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst, 0); \
                IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst, 0); \
                IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst, 0); \
                IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,          0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,  1); \
                    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,                   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,          0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,  1); \
                    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,                   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,          0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,  1); \
                    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,                   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
12510
/**
 * Body for the LOCK-prefixed memory-target path of 'inc/dec/not/neg Ev'.
 *
 * Must directly follow IEMOP_BODY_UNARY_Ev in the same function: it starts
 * inside the 'else' block that macro left open and closes the remaining
 * braces.  Uses IEMOP_HLP_DONE_DECODING() (not the NO_LOCK variant) since a
 * LOCK prefix is legal here.
 *
 * @param   a_fnLockedU16   Locked (atomic) worker for 16-bit operand size.
 * @param   a_fnLockedU32   Locked (atomic) worker for 32-bit operand size.
 * @param   a_fnLockedU64   Locked (atomic) worker for 64-bit operand size.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,          0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,  1); \
                    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,                   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,          0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,  1); \
                    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,                   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,          0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,  1); \
                    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,                   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
12576
12577
/**
 * @opmaps grp3_f6
 * @opcode /0
 * @todo also /1
 *
 * 'test Eb,Ib' - AND the operands, set flags, discard the result.
 * AF is left undefined by the instruction.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(3, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 3, 0);
        IEM_MC_ARG(uint8_t const *, pu8Dst,             0); /* read-only: test doesn't write the destination */
        IEM_MC_ARG(uint8_t,         u8Src,              1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,                   bUnmapInfo);

        /* The '1' accounts for the imm8 that follows the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12627
12628
/**
 * Opcode 0xf6 /4, /5, /6 and /7 - common worker for byte-sized
 * mul/imul/div/idiv.
 *
 * The worker operates on AX (implicit destination for the 8-bit forms) and
 * returns non-zero to signal a \#DE (divide error / overflow).
 *
 * @param   bRm     The ModR/M byte.
 * @param   pfnU8   The 8-bit assembly worker (mul/imul/div/idiv variant).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEM_MC_BEGIN(3, 1, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc != 0 means the worker detected a divide error / overflow. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2, 0);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc != 0 means the worker detected a divide error / overflow. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
12679
12680
/**
 * Opcode 0xf7 /4, /5, /6 and /7 - common worker for word/dword/qword
 * mul/imul/div/idiv.
 *
 * Operates on the implicit xDX:xAX register pair and dispatches on the
 * effective operand size.  The worker returns non-zero to signal a \#DE
 * (divide error / overflow).
 *
 * @param   bRm     The ModR/M byte.
 * @param   pImpl   Table with the 16/32/64-bit assembly workers.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 1, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_MIN_386);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit writes in 64-bit mode zero the high dwords on success. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2, 0);
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_MIN_386);
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit writes in 64-bit mode zero the high dwords on success. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_64BIT);
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12851
12852
/**
 * @opmaps grp3_f6
 * @opcode /2
 *
 * 'not Eb' - one's complement negation; no flags are affected.
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Eb, "not Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
}
12862
12863
12864/**
12865 * @opmaps grp3_f6
12866 * @opcode /3
12867 */
12868FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
12869{
12870 IEMOP_MNEMONIC(net_Eb, "neg Eb");
12871 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
12872}
12873
12874
/**
 * @opcode 0xf6
 *
 * Group 3 dispatcher for byte operands; selects the handler based on the
 * ModR/M reg field.  /0 and /1 both decode as 'test Eb,Ib'.
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12906
12907
/**
 * Opcode 0xf7 /0 - 'test Ev,Iv'.
 *
 * ANDs the operands, sets the flags and discards the result (the destination
 * is mapped read-only in the memory case).  AF is left undefined.  The
 * 64-bit form uses a sign-extended 32-bit immediate, per the instruction
 * encoding rules.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended to 64 bits */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0);
                IEM_MC_ARG(uint16_t const *, pu16Dst,           0); /* read-only: test doesn't write the destination */
                IEM_MC_ARG(uint16_t,        u16Src,             1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,                   bUnmapInfo);

                /* The '2' accounts for the imm16 that follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386);
                IEM_MC_ARG(uint32_t const *, pu32Dst,           0);
                IEM_MC_ARG(uint32_t,        u32Src,             1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,                   bUnmapInfo);

                /* The '4' accounts for the imm32 that follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT);
                IEM_MC_ARG(uint64_t const *, pu64Dst,           0);
                IEM_MC_ARG(uint64_t,        u64Src,             1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,                   bUnmapInfo);

                /* The '4' accounts for the imm32 (sign-extended) that follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13040
13041
/** Opcode 0xf7 /2 - 'not Ev'.  One's complement; no flags affected.
 *  The two body macros form a single construct (see their definitions). */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_not_u16,        iemAImpl_not_u32,        iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
13049
13050
/** Opcode 0xf7 /3 - 'neg Ev'.  Two's complement negation.
 *  The two body macros form a single construct (see their definitions). */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_neg_u16,        iemAImpl_neg_u32,        iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
13058
13059
/**
 * @opcode 0xf7
 *
 * Group 3 dispatcher for word/dword/qword operands; selects the handler
 * based on the ModR/M reg field.  /0 and /1 both decode as 'test Ev,Iv'.
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13091
13092
/**
 * @opcode 0xf8
 *
 * 'clc' - clear the carry flag.  No operands, no other flags touched.
 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13105
13106
/**
 * @opcode 0xf9
 *
 * 'stc' - set the carry flag.  No operands, no other flags touched.
 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13119
13120
/**
 * @opcode 0xfa
 *
 * 'cli' - clear the interrupt flag.  Privilege/VME checks live in the
 * C implementation, hence the deferral.
 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, iemCImpl_cli);
}
13130
13131
/**
 * @opcode 0xfb
 *
 * 'sti' - set the interrupt flag.  IRQs must be re-checked after the
 * following instruction (interrupt shadow), hence CHECK_IRQ_AFTER.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER | IEM_CIMPL_F_VMEXIT, iemCImpl_sti);
}
13138
13139
/**
 * @opcode 0xfc
 *
 * 'cld' - clear the direction flag (string ops count upwards).
 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13152
13153
/**
 * @opcode 0xfd
 *
 * 'std' - set the direction flag (string ops count downwards).
 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEM_MC_BEGIN(0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13166
13167
/**
 * @opmaps grp4
 * @opcode /0
 *
 * 'inc Eb' - increment byte; CF is not affected.
 */
FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Eb, "inc Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
}
13177
13178
/**
 * @opmaps grp4
 * @opcode /1
 *
 * 'dec Eb' - decrement byte; CF is not affected.
 */
FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Eb, "dec Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
}
13188
13189
/**
 * @opcode 0xfe
 *
 * Group 4 dispatcher: only /0 (inc Eb) and /1 (dec Eb) are defined;
 * everything else raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
        default:
            /** @todo is the eff-addr decoded? */
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}
13206
/** Opcode 0xff /0 - 'inc Ev'.  CF is not affected.
 *  The two body macros form a single construct (see their definitions). */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_inc_u16,        iemAImpl_inc_u32,        iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
13214
13215
/** Opcode 0xff /1 - 'dec Ev'.  CF is not affected.
 *  The two body macros form a single construct (see their definitions). */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_dec_u16,        iemAImpl_dec_u32,        iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
13223
13224
/**
 * Opcode 0xff /2 - 'call Ev' (near indirect call).
 *
 * In 64-bit mode the default operand size is 64 bits and Intel ignores the
 * operand-size prefix (see the helper macro below).
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from memory. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_64BIT);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13311
/**
 * Body for 'callf/jmpf Ep' (group 5 /3 and /5): far pointer loaded from
 * memory as offset followed by a 16-bit selector.
 *
 * Register forms are invalid and raise \#UD.  In 64-bit mode the default
 * operand size is 32 bits; only Intel honours REX.W here (AMD asserts this
 * can't happen in the 64-bit case).
 *
 * @param   a_bRm       The ModR/M byte.
 * @param   a_fnCImpl   C implementation taking (u16Sel, offSeg, enmEffOpSize).
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1, 0); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint16_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_16BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint32_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_32BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint64_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_64BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE /* no gates */, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
13379
13380
13381/**
13382 * Opcode 0xff /3.
13383 * @param bRm The RM byte.
13384 */
13385FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
13386{
13387 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
13388 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf);
13389}
13390
13391
13392/**
13393 * Opcode 0xff /4.
13394 * @param bRm The RM byte.
13395 */
13396FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
13397{
13398 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
13399 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13400
13401 if (IEM_IS_MODRM_REG_MODE(bRm))
13402 {
13403 /* The new RIP is taken from a register. */
13404 switch (pVCpu->iem.s.enmEffOpSize)
13405 {
13406 case IEMMODE_16BIT:
13407 IEM_MC_BEGIN(0, 1, 0);
13408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13409 IEM_MC_LOCAL(uint16_t, u16Target);
13410 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13411 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
13412 IEM_MC_END();
13413 break;
13414
13415 case IEMMODE_32BIT:
13416 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
13417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13418 IEM_MC_LOCAL(uint32_t, u32Target);
13419 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13420 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
13421 IEM_MC_END();
13422 break;
13423
13424 case IEMMODE_64BIT:
13425 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
13426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13427 IEM_MC_LOCAL(uint64_t, u64Target);
13428 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13429 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
13430 IEM_MC_END();
13431 break;
13432
13433 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13434 }
13435 }
13436 else
13437 {
13438 /* The new RIP is taken from a memory location. */
13439 switch (pVCpu->iem.s.enmEffOpSize)
13440 {
13441 case IEMMODE_16BIT:
13442 IEM_MC_BEGIN(0, 2, 0);
13443 IEM_MC_LOCAL(uint16_t, u16Target);
13444 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13445 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13447 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13448 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
13449 IEM_MC_END();
13450 break;
13451
13452 case IEMMODE_32BIT:
13453 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386);
13454 IEM_MC_LOCAL(uint32_t, u32Target);
13455 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13456 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13458 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13459 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
13460 IEM_MC_END();
13461 break;
13462
13463 case IEMMODE_64BIT:
13464 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
13465 IEM_MC_LOCAL(uint64_t, u64Target);
13466 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13467 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13469 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13470 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
13471 IEM_MC_END();
13472 break;
13473
13474 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13475 }
13476 }
13477}
13478
13479
13480/**
13481 * Opcode 0xff /5.
13482 * @param bRm The RM byte.
13483 */
13484FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
13485{
13486 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
13487 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp);
13488}
13489
13490
13491/**
13492 * Opcode 0xff /6.
13493 * @param bRm The RM byte.
13494 */
13495FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
13496{
13497 IEMOP_MNEMONIC(push_Ev, "push Ev");
13498
13499 /* Registers are handled by a common worker. */
13500 if (IEM_IS_MODRM_REG_MODE(bRm))
13501 return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
13502
13503 /* Memory we do here. */
13504 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13505 switch (pVCpu->iem.s.enmEffOpSize)
13506 {
13507 case IEMMODE_16BIT:
13508 IEM_MC_BEGIN(0, 2, 0);
13509 IEM_MC_LOCAL(uint16_t, u16Src);
13510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13511 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13513 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13514 IEM_MC_PUSH_U16(u16Src);
13515 IEM_MC_ADVANCE_RIP_AND_FINISH();
13516 IEM_MC_END();
13517 break;
13518
13519 case IEMMODE_32BIT:
13520 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT);
13521 IEM_MC_LOCAL(uint32_t, u32Src);
13522 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13523 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13525 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13526 IEM_MC_PUSH_U32(u32Src);
13527 IEM_MC_ADVANCE_RIP_AND_FINISH();
13528 IEM_MC_END();
13529 break;
13530
13531 case IEMMODE_64BIT:
13532 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
13533 IEM_MC_LOCAL(uint64_t, u64Src);
13534 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13535 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13537 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13538 IEM_MC_PUSH_U64(u64Src);
13539 IEM_MC_ADVANCE_RIP_AND_FINISH();
13540 IEM_MC_END();
13541 break;
13542
13543 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13544 }
13545}
13546
13547
13548/**
13549 * @opcode 0xff
13550 */
13551FNIEMOP_DEF(iemOp_Grp5)
13552{
13553 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13554 switch (IEM_GET_MODRM_REG_8(bRm))
13555 {
13556 case 0:
13557 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
13558 case 1:
13559 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
13560 case 2:
13561 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
13562 case 3:
13563 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
13564 case 4:
13565 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
13566 case 5:
13567 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
13568 case 6:
13569 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
13570 case 7:
13571 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
13572 IEMOP_RAISE_INVALID_OPCODE_RET();
13573 }
13574 AssertFailedReturn(VERR_IEM_IPE_3);
13575}
13576
13577
13578
/**
 * The one-byte opcode dispatch table.
 *
 * Indexed by the first opcode byte.  Prefixes (lock/rep/segment/size), the
 * two-byte escape (0x0f), the FPU escapes (0xd8..0xdf) and the ModR/M
 * groups (Grp1..Grp5, Grp11) are handled by the entries themselves.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex3, iemOp_lds_Gv_Mp__vex2, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_in_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
13646
13647
13648/** @} */
13649
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette