VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 100834

Last change on this file since 100834 was 100834, checked in by vboxsync, 19 months ago

VMM/IEM: More conversion from IEM_MC_MEM_MAP to IEM_MC_MEM_MAP_XXX, fixing xchg todo regarding commit register changes after the memory change. bugref:10369

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 484.3 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 100834 2023-08-09 14:49:39Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
59/**
60 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
61 * memory/register as the destination.
62 *
63 * Used with IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED.
64 */
65#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8) \
66 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
67 \
68 /* \
69 * If rm is denoting a register, no more instruction bytes. \
70 */ \
71 if (IEM_IS_MODRM_REG_MODE(bRm)) \
72 { \
73 IEM_MC_BEGIN(3, 0); \
74 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
75 IEM_MC_ARG(uint8_t, u8Src, 1); \
76 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
77 \
78 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
79 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
80 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
81 IEM_MC_REF_EFLAGS(pEFlags); \
82 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
83 \
84 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
85 IEM_MC_END(); \
86 } \
87 else \
88 { \
89 /* \
90 * We're accessing memory. \
91 * Note! We're putting the eflags on the stack here so we can commit them \
92 * after the memory. \
93 */ \
94 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
95 { \
96 IEM_MC_BEGIN(3, 3); \
97 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
98 IEM_MC_ARG(uint8_t, u8Src, 1); \
99 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
100 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
101 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
102 \
103 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
104 IEMOP_HLP_DONE_DECODING(); \
105 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
106 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
107 IEM_MC_FETCH_EFLAGS(EFlags); \
108 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
109 \
110 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
111 IEM_MC_COMMIT_EFLAGS(EFlags); \
112 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
113 IEM_MC_END(); \
114 } \
115 else \
116 { \
117 (void)0
118
/**
 * Body for instructions like TEST & CMP, ++ with a byte memory/registers as
 * operands (read-only destination: the memory operand is mapped RO and only
 * the eflags result is committed).
 *
 * Used with IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED.
 *
 * Note!  This macro deliberately leaves an open if/else and two unbalanced
 *        braces; the companion tail macro named above closes them.
 */
#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3); \
            IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
            IEM_MC_ARG(uint8_t,         u8Src,  1); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
178
/**
 * Tail for IEMOP_BODY_BINARY_rm_r8_RW/RO when the instruction does not allow
 * the LOCK prefix: raises the invalid-lock-prefix exception and closes the
 * braces left open by the body macro.
 */
#define IEMOP_BODY_BINARY_rm_r8_NO_LOCK() \
    IEMOP_HLP_DONE_DECODING(); \
    IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
185
/**
 * Tail for IEMOP_BODY_BINARY_rm_r8_RW when the instruction allows the LOCK
 * prefix: emits the locked-memory variant and closes the braces left open by
 * the body macro.  (bRm is declared by the body macro.)
 */
#define IEMOP_BODY_BINARY_rm_r8_LOCKED(a_fnLockedU8) \
    IEM_MC_BEGIN(3, 3); \
    IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
    IEM_MC_ARG(uint8_t,   u8Src,  1); \
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
    IEM_MC_LOCAL(uint8_t, bMapInfoDst); \
    \
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
    IEMOP_HLP_DONE_DECODING(); \
    IEM_MC_MEM_MAP_U8_RW(pu8Dst, bMapInfoDst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
    IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
    IEM_MC_FETCH_EFLAGS(EFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
    \
    /* Commit the memory change before the eflags so the register state only \
       changes after the memory access succeeded. */ \
    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bMapInfoDst); \
    IEM_MC_COMMIT_EFLAGS(EFlags); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END(); \
        } \
    } \
    (void)0
208
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination (Gb, Eb encoding).  Self-contained: no companion tail macro is
 * needed since the destination is always a register and LOCK is invalid.
 */
#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory.  Source operand only, so a plain fetch \
         * suffices (no mapping/commit needed). \
         */ \
        IEM_MC_BEGIN(3, 1); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
257
258
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Used with IEMOP_BODY_BINARY_rm_rv_LOCKED.
 *
 * Note!  This macro deliberately leaves an open if/else and two unbalanced
 *        braces; IEMOP_BODY_BINARY_rm_rv_LOCKED closes them.
 */
#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                /* 32-bit writes zero the high half of the 64-bit register. */ \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t,   u16Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t,   u32Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t,   u64Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * Tail for IEMOP_BODY_BINARY_rm_rv_RW: emits the locked-memory variants and
 * closes the braces left open by the body macro.  (bRm is declared by the
 * body macro.)
 */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 3); \
            IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
            IEM_MC_ARG(uint16_t,   u16Src,  1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 3); \
            IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
            IEM_MC_ARG(uint32_t,   u32Src,  1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            IEM_MC_BEGIN(3, 3); \
            IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
            IEM_MC_ARG(uint64_t,   u64Src,  1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    } \
    } \
    (void)0
477
/**
 * Body for read-only word/dword/qword instructions like TEST and CMP with
 * memory/register as the destination.
 *
 * Self-contained (no companion tail macro): the LOCK-prefix path raises the
 * invalid-lock-prefix exception, since these instructions never write memory.
 */
#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                /* No high-dword clearing here: the destination register is \
                   not written by these read-only instructions. */ \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t,         u16Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t,         u32Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t,         u64Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* Read-only instructions never take the LOCK prefix. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
627
628
/**
 * Body for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * Note!  Intentionally no trailing semicolon after IEM_MC_END(): the
 *        invocation site supplies it.
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    \
    IEM_MC_BEGIN(3, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1); \
    IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
    \
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
    \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
648
/**
 * Body for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * @param a_fModifiesDstReg  Non-zero when the instruction writes the
 *                           destination register; only used to decide whether
 *                           the 32-bit case must clear the high dword.
 *
 * NOTE(review): unlike IEMOP_BODY_BINARY_rm_rv_RW, the cases here have no
 * break statements — this appears to rely on IEM_MC_ADVANCE_RIP_AND_FINISH
 * leaving the switch (presumably via return); confirm against the MC macro
 * definitions.
 */
#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
            \
            if (a_fModifiesDstReg) /* 32-bit writes zero the high dword. */ \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            /* Iz is sign-extended from 32 bits in 64-bit operand size. */ \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
715
716
717
718/* Instruction specification format - work in progress: */
719
/**
 * @opcode      0x00
 * @opmnemonic  add
 * @op1         rm:Eb
 * @op2         reg:Gb
 * @opmaps      one
 * @openc       ModR/M
 * @opflmodify  cf,pf,af,zf,sf,of
 * @ophints     harmless ignores_op_sizes
 * @opstats     add_Eb_Gb
 * @opgroup     og_gen_arith_bin
 * @optest              op1=1   op2=1   -> op1=2   efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl|=cf op1=1   op2=2   -> op1=3   efl&|=nc,po,na,nz,pl,nv
 * @optest              op1=254 op2=1   -> op1=255 efl&|=nc,po,na,nz,ng,nv
 * @optest              op1=128 op2=128 -> op1=0   efl&|=ov,pl,zf,na,po,cf
 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* The _RW body opens an if/else that the _LOCKED tail macro closes. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_add_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_add_u8_locked);
}
742
743
/**
 * @opcode      0x01
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @optest               op1=1  op2=1  -> op1=2  efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl|=cf  op1=2  op2=2  -> op1=4  efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl&~=cf op1=-1 op2=1  -> op1=0  efl&|=cf,po,af,zf,pl,nv
 * @optest               op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* The _RW body opens an if/else that the _LOCKED tail macro closes. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
759
760
/**
 * @opcode      0x02
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8); /* register destination; LOCK invalid */
}
772
773
/**
 * @opcode      0x03
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Ev_Gv
 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1 /*modifies dst reg*/);
}
785
786
/**
 * @opcode      0x04
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
}
798
799
/**
 * @opcode      0x05
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @optest               op1=1  op2=1  -> op1=2  efl&|=nv,pl,nz,na,pe
 * @optest      efl|=cf  op1=2  op2=2  -> op1=4  efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl&~=cf op1=-1 op2=1  -> op1=0  efl&|=cf,po,af,zf,pl,nv
 * @optest               op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1 /*modifies dst reg*/);
}
814
815
/**
 * @opcode      0x06
 * @opgroup     og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* invalid in 64-bit mode */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
826
827
/**
 * @opcode      0x07
 * @opgroup     og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* invalid in 64-bit mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to C implementation; may change processor mode (IEM_CIMPL_F_MODE). */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
839
840
/**
 * @opcode      0x08
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @optest                  op1=7    op2=12   -> op1=15   efl&|=nc,po,na,nz,pl,nv
 * @optest      efl|=of,cf  op1=0    op2=0    -> op1=0    efl&|=nc,po,na,zf,pl,nv
 * @optest                  op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 * @optest                  op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after OR */
    /* The _RW body opens an if/else that the _LOCKED tail macro closes. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_or_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_or_u8_locked);
}
859
860
/**
 * @opcode      0x09
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @optest      efl|=of,cf op1=12 op2=7 -> op1=15   efl&|=nc,po,na,nz,pl,nv
 * @optest      efl|=of,cf op1=0 op2=0  -> op1=0    efl&|=nc,po,na,zf,pl,nv
 * @optest      op1=-2 op2=1            -> op1=-1   efl&|=nc,po,na,nz,ng,nv
 * @optest      o16 / op1=0x5a5a op2=0xa5a5                     -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5             -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after OR */
    /* The _RW body opens an if/else that the _LOCKED tail macro closes. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
881
882
/**
 * @opcode      0x0a
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after OR */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
}
897
898
/**
 * @opcode      0x0b
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @opcopytests iemOp_or_Ev_Gv
 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after OR */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1 /*modifies dst reg*/);
}
913
914
/**
 * @opcode      0x0c
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after OR */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
}
929
930
/**
 * @opcode      0x0d
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @optest      efl|=of,cf op1=12 op2=7 -> op1=15   efl&|=nc,po,na,nz,pl,nv
 * @optest      efl|=of,cf op1=0 op2=0  -> op1=0    efl&|=nc,po,na,zf,pl,nv
 * @optest      op1=-2 op2=1            -> op1=-1   efl&|=nc,po,na,nz,ng,nv
 * @optest      o16 / op1=0x5a5a op2=0xa5a5         -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after OR */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1 /*modifies dst reg*/);
}
951
952
953/**
954 * @opcode 0x0e
955 * @opgroup og_stack_sreg
956 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS: invalid in 64-bit mode; shares the common segment-register push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
963
964
965/**
966 * @opcode 0x0f
967 * @opmnemonic EscTwo0f
968 * @openc two0f
969 * @opdisenum OP_2B_ESC
970 * @ophints harmless
971 * @opgroup og_escapes
972 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    /* 0x0f: escape byte into the two-byte opcode map on 286+; POP CS on the 8086. */
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        /* The map holds four entries per opcode byte, selected by the
           mandatory-prefix index (see the 0xbc asserts above). */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
1008
1009/**
1010 * @opcode 0x10
1011 * @opgroup og_gen_arith_bin
1012 * @opfltest cf
1013 * @opflmodify cf,pf,af,zf,sf,of
1014 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1015 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1016 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1017 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1018 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1019 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC Eb,Gb: byte add-with-carry into r/m; emits both the normal and the
       LOCK-prefixed (memory destination) decoder bodies. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_adc_u8_locked);
}
1026
1027
1028/**
1029 * @opcode 0x11
1030 * @opgroup og_gen_arith_bin
1031 * @opfltest cf
1032 * @opflmodify cf,pf,af,zf,sf,of
1033 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1034 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1035 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1036 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1037 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1038 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC Ev,Gv: 16/32/64-bit add-with-carry into r/m; normal + LOCKed bodies. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1045
1046
1047/**
1048 * @opcode 0x12
1049 * @opgroup og_gen_arith_bin
1050 * @opfltest cf
1051 * @opflmodify cf,pf,af,zf,sf,of
1052 * @opcopytests iemOp_adc_Eb_Gb
1053 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC Gb,Eb: register destination, so no LOCK form. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
1059
1060
1061/**
1062 * @opcode 0x13
1063 * @opgroup og_gen_arith_bin
1064 * @opfltest cf
1065 * @opflmodify cf,pf,af,zf,sf,of
1066 * @opcopytests iemOp_adc_Ev_Gv
1067 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC Gv,Ev: register destination; last arg 1 matches the writing forms
       (cmp passes 0) -- see IEMOP_BODY_BINARY_rv_rm. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1073
1074
1075/**
1076 * @opcode 0x14
1077 * @opgroup og_gen_arith_bin
1078 * @opfltest cf
1079 * @opflmodify cf,pf,af,zf,sf,of
1080 * @opcopytests iemOp_adc_Eb_Gb
1081 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL,Ib: fixed-register byte immediate form. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
1087
1088
1089/**
1090 * @opcode 0x15
1091 * @opgroup og_gen_arith_bin
1092 * @opfltest cf
1093 * @opflmodify cf,pf,af,zf,sf,of
1094 * @opcopytests iemOp_adc_Ev_Gv
1095 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX,Iz: operand-size dependent immediate form. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1101
1102
1103/**
1104 * @opcode 0x16
1105 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS: invalid in 64-bit mode; shares the common segment-register push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1112
1113
1114/**
1115 * @opcode 0x17
1116 * @opgroup og_gen_arith_bin
1117 * @opfltest cf
1118 * @opflmodify cf,pf,af,zf,sf,of
1119 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS: invalid in 64-bit mode; flagged DISOPTYPE_INHIBIT_IRQS since it
       blocks interrupts for the following instruction.  Defers to the common
       pop-sreg C implementation with IEM_CIMPL_F_MODE (may change stack attrs). */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1127
1128
1129/**
1130 * @opcode 0x18
1131 * @opgroup og_gen_arith_bin
1132 * @opfltest cf
1133 * @opflmodify cf,pf,af,zf,sf,of
1134 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB Eb,Gb: byte subtract-with-borrow into r/m; normal + LOCKed bodies. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sbb_u8_locked);
}
1141
1142
1143/**
1144 * @opcode 0x19
1145 * @opgroup og_gen_arith_bin
1146 * @opfltest cf
1147 * @opflmodify cf,pf,af,zf,sf,of
1148 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB Ev,Gv: 16/32/64-bit subtract-with-borrow into r/m; normal + LOCKed bodies. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1155
1156
1157/**
1158 * @opcode 0x1a
1159 * @opgroup og_gen_arith_bin
1160 * @opfltest cf
1161 * @opflmodify cf,pf,af,zf,sf,of
1162 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB Gb,Eb: register destination, so no LOCK form. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
1168
1169
1170/**
1171 * @opcode 0x1b
1172 * @opgroup og_gen_arith_bin
1173 * @opfltest cf
1174 * @opflmodify cf,pf,af,zf,sf,of
1175 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB Gv,Ev: register destination. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1181
1182
1183/**
1184 * @opcode 0x1c
1185 * @opgroup og_gen_arith_bin
1186 * @opfltest cf
1187 * @opflmodify cf,pf,af,zf,sf,of
1188 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL,Ib: fixed-register byte immediate form. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
1194
1195
1196/**
1197 * @opcode 0x1d
1198 * @opgroup og_gen_arith_bin
1199 * @opfltest cf
1200 * @opflmodify cf,pf,af,zf,sf,of
1201 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX,Iz: operand-size dependent immediate form. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1207
1208
1209/**
1210 * @opcode 0x1e
1211 * @opgroup og_stack_sreg
1212 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS: invalid in 64-bit mode; shares the common segment-register push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1219
1220
1221/**
1222 * @opcode 0x1f
1223 * @opgroup og_stack_sreg
1224 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS: invalid in 64-bit mode; defers to the common pop-sreg C
       implementation with IEM_CIMPL_F_MODE (segment load can change mode attrs). */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1232
1233
1234/**
1235 * @opcode 0x20
1236 * @opgroup og_gen_arith_bin
1237 * @opflmodify cf,pf,af,zf,sf,of
1238 * @opflundef af
1239 * @opflclear of,cf
1240 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND Eb,Gb: byte AND into r/m; normal + LOCKed bodies; AF undefined. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_and_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_and_u8_locked);
}
1248
1249
1250/**
1251 * @opcode 0x21
1252 * @opgroup og_gen_arith_bin
1253 * @opflmodify cf,pf,af,zf,sf,of
1254 * @opflundef af
1255 * @opflclear of,cf
1256 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND Ev,Gv: 16/32/64-bit AND into r/m; normal + LOCKed bodies; AF undefined. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1264
1265
1266/**
1267 * @opcode 0x22
1268 * @opgroup og_gen_arith_bin
1269 * @opflmodify cf,pf,af,zf,sf,of
1270 * @opflundef af
1271 * @opflclear of,cf
1272 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND Gb,Eb: register destination, so no LOCK form; AF undefined. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1279
1280
1281/**
1282 * @opcode 0x23
1283 * @opgroup og_gen_arith_bin
1284 * @opflmodify cf,pf,af,zf,sf,of
1285 * @opflundef af
1286 * @opflclear of,cf
1287 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND Gv,Ev: register destination; AF undefined. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1294
1295
1296/**
1297 * @opcode 0x24
1298 * @opgroup og_gen_arith_bin
1299 * @opflmodify cf,pf,af,zf,sf,of
1300 * @opflundef af
1301 * @opflclear of,cf
1302 */
1303FNIEMOP_DEF(iemOp_and_Al_Ib)
1304{
1305 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1306 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1307 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1308}
1309
1310
1311/**
1312 * @opcode 0x25
1313 * @opgroup og_gen_arith_bin
1314 * @opflmodify cf,pf,af,zf,sf,of
1315 * @opflundef af
1316 * @opflclear of,cf
1317 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX,Iz: operand-size dependent immediate form; AF undefined. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1324
1325
1326/**
1327 * @opcode 0x26
1328 * @opmnemonic SEG
1329 * @op1 ES
1330 * @opgroup og_prefix
1331 * @openc prefix
1332 * @opdisenum OP_SEG
1333 * @ophints harmless
1334 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record the prefix and effective segment,
       then decode the next opcode byte thru the one-byte map. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1344
1345
1346/**
1347 * @opcode 0x27
1348 * @opfltest af,cf
1349 * @opflmodify cf,pf,af,zf,sf,of
1350 * @opflundef of
1351 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA: decimal adjust AL after addition; invalid in 64-bit mode;
       OF is undefined, everything else deferred to the C implementation. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_daa);
}
1360
1361
1362/**
1363 * @opcode 0x28
1364 * @opgroup og_gen_arith_bin
1365 * @opflmodify cf,pf,af,zf,sf,of
1366 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB Eb,Gb: byte subtract into r/m; normal + LOCKed bodies. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sub_u8_locked);
}
1373
1374
1375/**
1376 * @opcode 0x29
1377 * @opgroup og_gen_arith_bin
1378 * @opflmodify cf,pf,af,zf,sf,of
1379 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB Ev,Gv: 16/32/64-bit subtract into r/m; normal + LOCKed bodies. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1386
1387
1388/**
1389 * @opcode 0x2a
1390 * @opgroup og_gen_arith_bin
1391 * @opflmodify cf,pf,af,zf,sf,of
1392 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB Gb,Eb: register destination, so no LOCK form. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1398
1399
1400/**
1401 * @opcode 0x2b
1402 * @opgroup og_gen_arith_bin
1403 * @opflmodify cf,pf,af,zf,sf,of
1404 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB Gv,Ev: register destination. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1410
1411
1412/**
1413 * @opcode 0x2c
1414 * @opgroup og_gen_arith_bin
1415 * @opflmodify cf,pf,af,zf,sf,of
1416 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL,Ib: fixed-register byte immediate form. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1422
1423
1424/**
1425 * @opcode 0x2d
1426 * @opgroup og_gen_arith_bin
1427 * @opflmodify cf,pf,af,zf,sf,of
1428 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX,Iz: operand-size dependent immediate form. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1434
1435
1436/**
1437 * @opcode 0x2e
1438 * @opmnemonic SEG
1439 * @op1 CS
1440 * @opgroup og_prefix
1441 * @openc prefix
1442 * @opdisenum OP_SEG
1443 * @ophints harmless
1444 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record it, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1454
1455
1456/**
1457 * @opcode 0x2f
1458 * @opfltest af,cf
1459 * @opflmodify cf,pf,af,zf,sf,of
1460 * @opflundef of
1461 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS: decimal adjust AL after subtraction; invalid in 64-bit mode;
       OF is undefined, work deferred to the C implementation. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_das);
}
1470
1471
1472/**
1473 * @opcode 0x30
1474 * @opgroup og_gen_arith_bin
1475 * @opflmodify cf,pf,af,zf,sf,of
1476 * @opflundef af
1477 * @opflclear of,cf
1478 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR Eb,Gb: byte XOR into r/m; normal + LOCKed bodies; AF undefined. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_xor_u8_locked);
}
1486
1487
1488/**
1489 * @opcode 0x31
1490 * @opgroup og_gen_arith_bin
1491 * @opflmodify cf,pf,af,zf,sf,of
1492 * @opflundef af
1493 * @opflclear of,cf
1494 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR Ev,Gv: 16/32/64-bit XOR into r/m; normal + LOCKed bodies; AF undefined. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1502
1503
1504/**
1505 * @opcode 0x32
1506 * @opgroup og_gen_arith_bin
1507 * @opflmodify cf,pf,af,zf,sf,of
1508 * @opflundef af
1509 * @opflclear of,cf
1510 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR Gb,Eb: register destination, so no LOCK form; AF undefined. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1517
1518
1519/**
1520 * @opcode 0x33
1521 * @opgroup og_gen_arith_bin
1522 * @opflmodify cf,pf,af,zf,sf,of
1523 * @opflundef af
1524 * @opflclear of,cf
1525 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR Gv,Ev: register destination; AF undefined. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
}
1532
1533
1534/**
1535 * @opcode 0x34
1536 * @opgroup og_gen_arith_bin
1537 * @opflmodify cf,pf,af,zf,sf,of
1538 * @opflundef af
1539 * @opflclear of,cf
1540 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL,Ib: fixed-register byte immediate form; AF undefined. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1547
1548
1549/**
1550 * @opcode 0x35
1551 * @opgroup og_gen_arith_bin
1552 * @opflmodify cf,pf,af,zf,sf,of
1553 * @opflundef af
1554 * @opflclear of,cf
1555 */
1556FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1557{
1558 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1559 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1560 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1561}
1562
1563
1564/**
1565 * @opcode 0x36
1566 * @opmnemonic SEG
1567 * @op1 SS
1568 * @opgroup og_prefix
1569 * @openc prefix
1570 * @opdisenum OP_SEG
1571 * @ophints harmless
1572 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record it, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1582
1583
1584/**
1585 * @opcode 0x37
1586 * @opfltest af,cf
1587 * @opflmodify cf,pf,af,zf,sf,of
1588 * @opflundef pf,zf,sf,of
1589 * @opgroup og_gen_arith_dec
1590 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1591 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1592 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1593 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1594 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1595 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1596 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1597 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1598 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1599 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1600 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1601 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1602 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1603 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1604 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1605 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1606 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1607 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1608 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1609 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1610 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1611 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1612 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1613 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1614 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1615 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1616 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1617 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1618 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1619 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1620 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1621 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA: ASCII adjust AL after addition; invalid in 64-bit mode;
       work deferred to the C implementation. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aaa);
}
1631
1632
1633/**
1634 * @opcode 0x38
1635 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP Eb,Gb: read-only compare, so r/m is mapped RO and LOCK is rejected. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
1642
1643
1644/**
1645 * @opcode 0x39
1646 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP Ev,Gv: read-only compare against r/m. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
1652
1653
1654/**
1655 * @opcode 0x3a
1656 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP Gb,Eb: register "destination" is only read; shares the common Gb,Eb body. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1662
1663
1664/**
1665 * @opcode 0x3b
1666 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP Gv,Ev: last macro arg is 0 here (vs 1 for the writing ops),
       presumably flagging that the destination register is not modified. */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1672
1673
1674/**
1675 * @opcode 0x3c
1676 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL,Ib: fixed-register byte immediate compare. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1682
1683
1684/**
1685 * @opcode 0x3d
1686 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX,Iz: operand-size dependent immediate compare; trailing 0 = no write-back. */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1692
1693
1694/**
1695 * @opcode 0x3e
1696 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record it, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1706
1707
1708/**
1709 * @opcode 0x3f
1710 * @opfltest af,cf
1711 * @opflmodify cf,pf,af,zf,sf,of
1712 * @opflundef pf,zf,sf,of
1713 * @opgroup og_gen_arith_dec
1714 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1715 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1716 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1717 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1718 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1719 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1720 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1721 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1722 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1723 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1724 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1725 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1726 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1727 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1728 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1729 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1730 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1731 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1732 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1733 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1734 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1735 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1736 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1737 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1738 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1739 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1740 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1741 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1742 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1743 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1744 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1745 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1746 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1747 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1748 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1749 */
1750FNIEMOP_DEF(iemOp_aas)
1751{
1752 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1753 IEMOP_HLP_NO_64BIT();
1754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1755 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1756
1757 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aas);
1758}
1759
1760
1761/**
1762 * Common 'inc/dec register' helper.
1763 *
1764 * Not for 64-bit code, only for what became the rex prefixes.
1765 */
1766#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
1767 switch (pVCpu->iem.s.enmEffOpSize) \
1768 { \
1769 case IEMMODE_16BIT: \
1770 IEM_MC_BEGIN(2, 0); \
1771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1772 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
1773 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1774 IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
1775 IEM_MC_REF_EFLAGS(pEFlags); \
1776 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
1777 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1778 IEM_MC_END(); \
1779 break; \
1780 \
1781 case IEMMODE_32BIT: \
1782 IEM_MC_BEGIN(2, 0); \
1783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1784 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
1785 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1786 IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
1787 IEM_MC_REF_EFLAGS(pEFlags); \
1788 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
1789 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
1790 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1791 IEM_MC_END(); \
1792 break; \
1793 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1794 } \
1795 (void)0
1796
1797/**
1798 * @opcode 0x40
1799 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     * (0x40 = REX with no bits set: just record the prefix and decode the next byte.)
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1817
1818
1819/**
1820 * @opcode 0x41
1821 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     * (0x41 = REX.B: extends the ModRM r/m / opcode-reg field; stored pre-shifted.)
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1840
1841
1842/**
1843 * @opcode 0x42
1844 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     * (0x42 = REX.X: extends the SIB index field; stored pre-shifted.)
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1863
1864
1865
1866/**
1867 * @opcode 0x43
1868 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     * (0x43 = REX.BX: both the B and X extension bits set.)
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1888
1889
1890/**
1891 * @opcode 0x44
1892 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     * (0x44 = REX.R: extends the ModRM reg field; stored pre-shifted.)
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1911
1912
1913/**
1914 * @opcode 0x45
1915 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     * (0x45 = REX.RB.)
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1935
1936
1937/**
1938 * @opcode 0x46
1939 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     * (0x46 = REX.RX.)
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
1959
1960
1961/**
1962 * @opcode 0x47
1963 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     * (0x47 = REX.RBX: R, B and X extension bits all set.)
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
1984
1985
1986/**
1987 * @opcode 0x48
 * DEC eAX in 16/32-bit mode; interpreted as the REX.W prefix in 64-bit mode.
1988 */
1989FNIEMOP_DEF(iemOp_dec_eAX)
1990{
1991 /*
1992 * This is a REX prefix in 64-bit mode.
1993 */
1994 if (IEM_IS_64BIT_CODE(pVCpu))
1995 {
1996 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
1997 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        /* REX.W changes the effective operand size, so recalculate it. */
1998 iemRecalEffOpSize(pVCpu);
1999
2000 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2001 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2002 }
2003
2004 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
2005 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
2006}
2007
2008
2009/**
2010 * @opcode 0x49
 * DEC eCX in 16/32-bit mode; interpreted as the REX.BW prefix in 64-bit mode.
2011 */
2012FNIEMOP_DEF(iemOp_dec_eCX)
2013{
2014 /*
2015 * This is a REX prefix in 64-bit mode.
2016 */
2017 if (IEM_IS_64BIT_CODE(pVCpu))
2018 {
2019 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
2020 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2021 pVCpu->iem.s.uRexB = 1 << 3;
        /* REX.W changes the effective operand size, so recalculate it. */
2022 iemRecalEffOpSize(pVCpu);
2023
2024 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2025 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2026 }
2027
2028 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
2029 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
2030}
2031
2032
2033/**
2034 * @opcode 0x4a
 * DEC eDX in 16/32-bit mode; interpreted as the REX.XW prefix in 64-bit mode.
2035 */
2036FNIEMOP_DEF(iemOp_dec_eDX)
2037{
2038 /*
2039 * This is a REX prefix in 64-bit mode.
2040 */
2041 if (IEM_IS_64BIT_CODE(pVCpu))
2042 {
2043 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
2044 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2045 pVCpu->iem.s.uRexIndex = 1 << 3;
        /* REX.W changes the effective operand size, so recalculate it. */
2046 iemRecalEffOpSize(pVCpu);
2047
2048 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2049 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2050 }
2051
2052 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
2053 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
2054}
2055
2056
2057/**
2058 * @opcode 0x4b
 * DEC eBX in 16/32-bit mode; interpreted as the REX.BXW prefix in 64-bit mode.
2059 */
2060FNIEMOP_DEF(iemOp_dec_eBX)
2061{
2062 /*
2063 * This is a REX prefix in 64-bit mode.
2064 */
2065 if (IEM_IS_64BIT_CODE(pVCpu))
2066 {
2067 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
2068 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2069 pVCpu->iem.s.uRexB = 1 << 3;
2070 pVCpu->iem.s.uRexIndex = 1 << 3;
        /* REX.W changes the effective operand size, so recalculate it. */
2071 iemRecalEffOpSize(pVCpu);
2072
2073 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2074 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2075 }
2076
2077 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
2078 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
2079}
2080
2081
2082/**
2083 * @opcode 0x4c
 * DEC eSP in 16/32-bit mode; interpreted as the REX.RW prefix in 64-bit mode.
2084 */
2085FNIEMOP_DEF(iemOp_dec_eSP)
2086{
2087 /*
2088 * This is a REX prefix in 64-bit mode.
2089 */
2090 if (IEM_IS_64BIT_CODE(pVCpu))
2091 {
2092 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
2093 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
2094 pVCpu->iem.s.uRexReg = 1 << 3;
        /* REX.W changes the effective operand size, so recalculate it. */
2095 iemRecalEffOpSize(pVCpu);
2096
2097 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2098 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2099 }
2100
2101 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
2102 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
2103}
2104
2105
2106/**
2107 * @opcode 0x4d
 * DEC eBP in 16/32-bit mode; interpreted as the REX.RBW prefix in 64-bit mode.
2108 */
2109FNIEMOP_DEF(iemOp_dec_eBP)
2110{
2111 /*
2112 * This is a REX prefix in 64-bit mode.
2113 */
2114 if (IEM_IS_64BIT_CODE(pVCpu))
2115 {
2116 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
2117 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2118 pVCpu->iem.s.uRexReg = 1 << 3;
2119 pVCpu->iem.s.uRexB = 1 << 3;
        /* REX.W changes the effective operand size, so recalculate it. */
2120 iemRecalEffOpSize(pVCpu);
2121
2122 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2123 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2124 }
2125
2126 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
2127 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
2128}
2129
2130
2131/**
2132 * @opcode 0x4e
 * DEC eSI in 16/32-bit mode; interpreted as the REX.RXW prefix in 64-bit mode.
2133 */
2134FNIEMOP_DEF(iemOp_dec_eSI)
2135{
2136 /*
2137 * This is a REX prefix in 64-bit mode.
2138 */
2139 if (IEM_IS_64BIT_CODE(pVCpu))
2140 {
2141 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
2142 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2143 pVCpu->iem.s.uRexReg = 1 << 3;
2144 pVCpu->iem.s.uRexIndex = 1 << 3;
        /* REX.W changes the effective operand size, so recalculate it. */
2145 iemRecalEffOpSize(pVCpu);
2146
2147 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2148 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2149 }
2150
2151 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
2152 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
2153}
2154
2155
2156/**
2157 * @opcode 0x4f
 * DEC eDI in 16/32-bit mode; interpreted as the REX.RBXW prefix in 64-bit mode.
2158 */
2159FNIEMOP_DEF(iemOp_dec_eDI)
2160{
2161 /*
2162 * This is a REX prefix in 64-bit mode.
2163 */
2164 if (IEM_IS_64BIT_CODE(pVCpu))
2165 {
2166 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
2167 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2168 pVCpu->iem.s.uRexReg = 1 << 3;
2169 pVCpu->iem.s.uRexB = 1 << 3;
2170 pVCpu->iem.s.uRexIndex = 1 << 3;
        /* REX.W changes the effective operand size, so recalculate it. */
2171 iemRecalEffOpSize(pVCpu);
2172
2173 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2174 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2175 }
2176
2177 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
2178 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
2179}
2180
2181
2182/**
2183 * Common 'push register' helper.
 *
 * In 64-bit mode the register index is extended with REX.B and the default
 * operand size becomes 64-bit; a 66h prefix selects 16-bit (there is no
 * 32-bit push in 64-bit mode, hence no 64-bit-mode IEMMODE_32BIT case).
 *
 * @param iReg The general purpose register index (X86_GREG_XXX).
2184 */
2185FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
2186{
2187 if (IEM_IS_64BIT_CODE(pVCpu))
2188 {
2189 iReg |= pVCpu->iem.s.uRexB;
2190 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2191 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2192 }
2193
2194 switch (pVCpu->iem.s.enmEffOpSize)
2195 {
2196 case IEMMODE_16BIT:
2197 IEM_MC_BEGIN(0, 1);
2198 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2199 IEM_MC_LOCAL(uint16_t, u16Value);
2200 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
2201 IEM_MC_PUSH_U16(u16Value);
2202 IEM_MC_ADVANCE_RIP_AND_FINISH();
2203 IEM_MC_END();
2204 break;
2205
2206 case IEMMODE_32BIT:
2207 IEM_MC_BEGIN(0, 1);
2208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2209 IEM_MC_LOCAL(uint32_t, u32Value);
2210 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
2211 IEM_MC_PUSH_U32(u32Value);
2212 IEM_MC_ADVANCE_RIP_AND_FINISH();
2213 IEM_MC_END();
2214 break;
2215
2216 case IEMMODE_64BIT:
2217 IEM_MC_BEGIN(0, 1);
2218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2219 IEM_MC_LOCAL(uint64_t, u64Value);
2220 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
2221 IEM_MC_PUSH_U64(u64Value);
2222 IEM_MC_ADVANCE_RIP_AND_FINISH();
2223 IEM_MC_END();
2224 break;
2225
2226 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2227 }
2228}
2229
2230
2231/**
2232 * @opcode 0x50
 * PUSH rAX/r8 - defers to the common helper for REX.B and operand size handling.
2233 */
2234FNIEMOP_DEF(iemOp_push_eAX)
2235{
2236 IEMOP_MNEMONIC(push_rAX, "push rAX");
2237 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
2238}
2239
2240
2241/**
2242 * @opcode 0x51
 * PUSH rCX/r9 - defers to the common helper for REX.B and operand size handling.
2243 */
2244FNIEMOP_DEF(iemOp_push_eCX)
2245{
2246 IEMOP_MNEMONIC(push_rCX, "push rCX");
2247 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
2248}
2249
2250
2251/**
2252 * @opcode 0x52
 * PUSH rDX/r10 - defers to the common helper for REX.B and operand size handling.
2253 */
2254FNIEMOP_DEF(iemOp_push_eDX)
2255{
2256 IEMOP_MNEMONIC(push_rDX, "push rDX");
2257 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
2258}
2259
2260
2261/**
2262 * @opcode 0x53
 * PUSH rBX/r11 - defers to the common helper for REX.B and operand size handling.
2263 */
2264FNIEMOP_DEF(iemOp_push_eBX)
2265{
2266 IEMOP_MNEMONIC(push_rBX, "push rBX");
2267 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
2268}
2269
2270
2271/**
2272 * @opcode 0x54
 * PUSH rSP/r12.  On the 8086 'push sp' stores the already-decremented SP,
 * which is emulated here by pushing SP minus 2; all later CPUs push the
 * original value via the common helper.
2273 */
2274FNIEMOP_DEF(iemOp_push_eSP)
2275{
2276 IEMOP_MNEMONIC(push_rSP, "push rSP");
2277 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
2278 {
2279 IEM_MC_BEGIN(0, 1);
2280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2281 IEM_MC_LOCAL(uint16_t, u16Value);
2282 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
2283 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
2284 IEM_MC_PUSH_U16(u16Value);
2285 IEM_MC_ADVANCE_RIP_AND_FINISH();
2286 IEM_MC_END();
2287 }
2288 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
2289}
2290
2291
2292/**
2293 * @opcode 0x55
 * PUSH rBP/r13 - defers to the common helper for REX.B and operand size handling.
2294 */
2295FNIEMOP_DEF(iemOp_push_eBP)
2296{
2297 IEMOP_MNEMONIC(push_rBP, "push rBP");
2298 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
2299}
2300
2301
2302/**
2303 * @opcode 0x56
 * PUSH rSI/r14 - defers to the common helper for REX.B and operand size handling.
2304 */
2305FNIEMOP_DEF(iemOp_push_eSI)
2306{
2307 IEMOP_MNEMONIC(push_rSI, "push rSI");
2308 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
2309}
2310
2311
2312/**
2313 * @opcode 0x57
 * PUSH rDI/r15 - defers to the common helper for REX.B and operand size handling.
2314 */
2315FNIEMOP_DEF(iemOp_push_eDI)
2316{
2317 IEMOP_MNEMONIC(push_rDI, "push rDI");
2318 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
2319}
2320
2321
2322/**
2323 * Common 'pop register' helper.
 *
 * In 64-bit mode the register index is extended with REX.B and the default
 * operand size becomes 64-bit; a 66h prefix selects 16-bit (there is no
 * 32-bit pop in 64-bit mode).  The 32-bit case clears the high half of the
 * 64-bit register after the pop.
 *
 * @param iReg The general purpose register index (X86_GREG_XXX).
2324 */
2325FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
2326{
2327 if (IEM_IS_64BIT_CODE(pVCpu))
2328 {
2329 iReg |= pVCpu->iem.s.uRexB;
2330 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2331 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2332 }
2333
2334 switch (pVCpu->iem.s.enmEffOpSize)
2335 {
2336 case IEMMODE_16BIT:
2337 IEM_MC_BEGIN(0, 1);
2338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2339 IEM_MC_LOCAL(uint16_t *, pu16Dst);
2340 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
2341 IEM_MC_POP_U16(pu16Dst);
2342 IEM_MC_ADVANCE_RIP_AND_FINISH();
2343 IEM_MC_END();
2344 break;
2345
2346 case IEMMODE_32BIT:
2347 IEM_MC_BEGIN(0, 1);
2348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2349 IEM_MC_LOCAL(uint32_t *, pu32Dst);
2350 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
2351 IEM_MC_POP_U32(pu32Dst);
2352 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
2353 IEM_MC_ADVANCE_RIP_AND_FINISH();
2354 IEM_MC_END();
2355 break;
2356
2357 case IEMMODE_64BIT:
2358 IEM_MC_BEGIN(0, 1);
2359 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2360 IEM_MC_LOCAL(uint64_t *, pu64Dst);
2361 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
2362 IEM_MC_POP_U64(pu64Dst);
2363 IEM_MC_ADVANCE_RIP_AND_FINISH();
2364 IEM_MC_END();
2365 break;
2366
2367 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2368 }
2369}
2370
2371
2372/**
2373 * @opcode 0x58
 * POP rAX/r8 - defers to the common helper for REX.B and operand size handling.
2374 */
2375FNIEMOP_DEF(iemOp_pop_eAX)
2376{
2377 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
2378 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
2379}
2380
2381
2382/**
2383 * @opcode 0x59
 * POP rCX/r9 - defers to the common helper for REX.B and operand size handling.
2384 */
2385FNIEMOP_DEF(iemOp_pop_eCX)
2386{
2387 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
2388 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
2389}
2390
2391
2392/**
2393 * @opcode 0x5a
 * POP rDX/r10 - defers to the common helper for REX.B and operand size handling.
2394 */
2395FNIEMOP_DEF(iemOp_pop_eDX)
2396{
2397 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
2398 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
2399}
2400
2401
2402/**
2403 * @opcode 0x5b
 * POP rBX/r11 - defers to the common helper for REX.B and operand size handling.
2404 */
2405FNIEMOP_DEF(iemOp_pop_eBX)
2406{
2407 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
2408 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
2409}
2410
2411
2412/**
2413 * @opcode 0x5c
 * POP rSP/r12.  With REX.B the target is r12 and the common helper is used
 * (which applies uRexB to X86_GREG_xSP); without it the destination is SP
 * itself and is special-cased below.
2414 */
2415FNIEMOP_DEF(iemOp_pop_eSP)
2416{
2417 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
2418 if (IEM_IS_64BIT_CODE(pVCpu))
2419 {
2420 if (pVCpu->iem.s.uRexB)
2421 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
2422 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2423 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2424 }
2425
2426 /** @todo add testcase for this instruction. */
2427 switch (pVCpu->iem.s.enmEffOpSize)
2428 {
2429 case IEMMODE_16BIT:
2430 IEM_MC_BEGIN(0, 1);
2431 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2432 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2433 IEM_MC_LOCAL(uint16_t, u16Dst);
2434 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
2435 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
2436 IEM_MC_ADVANCE_RIP_AND_FINISH();
2437 IEM_MC_END();
2438 break;
2439
2440 case IEMMODE_32BIT:
2441 IEM_MC_BEGIN(0, 1);
2442 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2443 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2444 IEM_MC_LOCAL(uint32_t, u32Dst);
2445 IEM_MC_POP_U32(&u32Dst);
2446 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
2447 IEM_MC_ADVANCE_RIP_AND_FINISH();
2448 IEM_MC_END();
2449 break;
2450
2451 case IEMMODE_64BIT:
2452 IEM_MC_BEGIN(0, 1);
2453 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2454 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2455 IEM_MC_LOCAL(uint64_t, u64Dst);
2456 IEM_MC_POP_U64(&u64Dst);
2457 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
2458 IEM_MC_ADVANCE_RIP_AND_FINISH();
2459 IEM_MC_END();
2460 break;
2461
2462 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2463 }
2464}
2465
2466
2467/**
2468 * @opcode 0x5d
 * POP rBP/r13 - defers to the common helper for REX.B and operand size handling.
2469 */
2470FNIEMOP_DEF(iemOp_pop_eBP)
2471{
2472 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
2473 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
2474}
2475
2476
2477/**
2478 * @opcode 0x5e
 * POP rSI/r14 - defers to the common helper for REX.B and operand size handling.
2479 */
2480FNIEMOP_DEF(iemOp_pop_eSI)
2481{
2482 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
2483 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
2484}
2485
2486
2487/**
2488 * @opcode 0x5f
 * POP rDI/r15 - defers to the common helper for REX.B and operand size handling.
2489 */
2490FNIEMOP_DEF(iemOp_pop_eDI)
2491{
2492 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
2493 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
2494}
2495
2496
2497/**
2498 * @opcode 0x60
 * PUSHA/PUSHAD - 80186+, invalid in 64-bit mode; deferred to a C
 * implementation selected by the effective operand size.
2499 */
2500FNIEMOP_DEF(iemOp_pusha)
2501{
2502 IEMOP_MNEMONIC(pusha, "pusha");
2503 IEMOP_HLP_MIN_186();
2504 IEMOP_HLP_NO_64BIT();
2505 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2506 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_16);
2507 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2508 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_32);
2509}
2510
2511
2512/**
2513 * @opcode 0x61
2514 */
2515FNIEMOP_DEF(iemOp_popa__mvex)
2516{
2517 if (!IEM_IS_64BIT_CODE(pVCpu))
2518 {
2519 IEMOP_MNEMONIC(popa, "popa");
2520 IEMOP_HLP_MIN_186();
2521 IEMOP_HLP_NO_64BIT();
2522 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2523 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_popa_16);
2524 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2525 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_popa_32);
2526 }
2527 IEMOP_MNEMONIC(mvex, "mvex");
2528 Log(("mvex prefix is not supported!\n"));
2529 IEMOP_RAISE_INVALID_OPCODE_RET();
2530}
2531
2532
2533/**
2534 * @opcode 0x62
2535 * @opmnemonic bound
2536 * @op1 Gv_RO
2537 * @op2 Ma
2538 * @opmincpu 80186
2539 * @ophints harmless x86_invalid_64
2540 * @optest op1=0 op2=0 ->
2541 * @optest op1=1 op2=0 -> value.xcpt=5
2542 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2543 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2544 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2545 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2546 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2547 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2548 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2549 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2550 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2551 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2552 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2553 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2554 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2555 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2556 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2557 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2558 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2559 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2560 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2561 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2562 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2563 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2564 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2565 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2566 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2567 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2568 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2569 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2570 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2571 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2572 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2573 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2574 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2575 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2576 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2577 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2578 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2579 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2580 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2581 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2582 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2583 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2584 */
2585FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
2586{
2587 /* The BOUND instruction is invalid in 64-bit mode. In legacy and
2588 compatibility mode it is invalid with MOD=3.
2589
2590 In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
2591 both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
2592 given as R and X without an exact description, so we assume it builds on
2593 the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
2594 like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
2595 uint8_t bRm;
2596 if (!IEM_IS_64BIT_CODE(pVCpu))
2597 {
2598 IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2599 IEMOP_HLP_MIN_186();
2600 IEM_OPCODE_GET_NEXT_U8(&bRm);
2601 if (IEM_IS_MODRM_MEM_MODE(bRm))
2602 {
2603 /** @todo testcase: check that there are two memory accesses involved. Check
2604 * whether they're both read before the \#BR triggers. */
2605 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2606 {
2607 IEM_MC_BEGIN(3, 1);
2608 IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
2609 IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
2610 IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
2611 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2612
2613 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2615
                /* Both bounds are read from memory; the upper bound lives 2 bytes in. */
2616 IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
2617 IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2618 IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
2619
2620 IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
2621 IEM_MC_END();
2622 }
2623 else /* 32-bit operands */
2624 {
2625 IEM_MC_BEGIN(3, 1);
2626 IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
2627 IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
2628 IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
2629 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2630
2631 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2633
                /* Both bounds are read from memory; the upper bound lives 4 bytes in. */
2634 IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
2635 IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2636 IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
2637
2638 IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
2639 IEM_MC_END();
2640 }
2641 }
2642
2643 /*
2644 * @opdone
2645 */
2646 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
2647 {
2648 /* Note that there is no need for the CPU to fetch further bytes
2649 here because MODRM.MOD == 3. */
2650 Log(("evex not supported by the guest CPU!\n"));
2651 IEMOP_RAISE_INVALID_OPCODE_RET();
2652 }
2653 }
2654 else
2655 {
2656 /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
2657 * does modr/m read, whereas AMD probably doesn't... */
2658 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
2659 {
2660 Log(("evex not supported by the guest CPU!\n"));
2661 return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
2662 }
2663 IEM_OPCODE_GET_NEXT_U8(&bRm);
2664 }
2665
    /* Reaching this point means an EVEX prefix; consume the remaining two
       payload bytes but bail out as EVEX decoding is not implemented yet. */
2666 IEMOP_MNEMONIC(evex, "evex");
2667 uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
2668 uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
2669 Log(("evex prefix is not implemented!\n"));
2670 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2671}
2672
2673
2674/** Opcode 0x63 - non-64-bit modes.
 *
 * ARPL Ew,Gw - 80286+, protected mode only (no real/V86 mode).  The register
 * form operates directly on the destination GPR; the memory form maps the
 * destination word read/write and commits both the word and EFLAGS after
 * the operation. */
2675FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
2676{
2677 IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
2678 IEMOP_HLP_MIN_286();
2679 IEMOP_HLP_NO_REAL_OR_V86_MODE();
2680 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2681
2682 if (IEM_IS_MODRM_REG_MODE(bRm))
2683 {
2684 /* Register */
2685 IEM_MC_BEGIN(3, 0);
2686 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
2687 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2688 IEM_MC_ARG(uint16_t, u16Src, 1);
2689 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2690
2691 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
2692 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
2693 IEM_MC_REF_EFLAGS(pEFlags);
2694 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
2695
2696 IEM_MC_ADVANCE_RIP_AND_FINISH();
2697 IEM_MC_END();
2698 }
2699 else
2700 {
2701 /* Memory */
2702 IEM_MC_BEGIN(3, 3);
2703 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2704 IEM_MC_ARG(uint16_t, u16Src, 1);
2705 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
2706 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2707 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
2708
2709 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2710 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
2711 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2712 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
2713 IEM_MC_FETCH_EFLAGS(EFlags);
2714 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
2715
        /* Commit the memory write before the flags, matching the MAP_RW pattern. */
2716 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
2717 IEM_MC_COMMIT_EFLAGS(EFlags);
2718 IEM_MC_ADVANCE_RIP_AND_FINISH();
2719 IEM_MC_END();
2720 }
2721}
2722
2723
2724/**
2725 * @opcode 0x63
2726 *
2727 * @note This is a weird one. It works like a regular move instruction if
2728 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2729 * @todo This definitely needs a testcase to verify the odd cases. */
2730FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
2731{
2732 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */
2733
2734 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
2735 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2736
2737 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2738 {
2739 if (IEM_IS_MODRM_REG_MODE(bRm))
2740 {
2741 /*
2742 * Register to register.
2743 */
2744 IEM_MC_BEGIN(0, 1);
2745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2746 IEM_MC_LOCAL(uint64_t, u64Value);
2747 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
2748 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2749 IEM_MC_ADVANCE_RIP_AND_FINISH();
2750 IEM_MC_END();
2751 }
2752 else
2753 {
2754 /*
2755 * We're loading a register from memory.
2756 */
2757 IEM_MC_BEGIN(0, 2);
2758 IEM_MC_LOCAL(uint64_t, u64Value);
2759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2760 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2762 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2763 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2764 IEM_MC_ADVANCE_RIP_AND_FINISH();
2765 IEM_MC_END();
2766 }
2767 }
2768 else
        /* The no-REX.W (plain move) variant is not implemented yet. */
2769 AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
2770}
2771
2772
2773/**
2774 * @opcode 0x64
2775 * @opmnemonic segfs
2776 * @opmincpu 80386
2777 * @opgroup og_prefixes
 *
 * FS segment-override prefix: records the prefix flag, makes FS the
 * effective segment, and continues decoding with the next opcode byte.
2778 */
2779FNIEMOP_DEF(iemOp_seg_FS)
2780{
2781 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
2782 IEMOP_HLP_MIN_386();
2783
2784 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
2785 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
2786
2787 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2788 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2789}
2790
2791
2792/**
2793 * @opcode 0x65
2794 * @opmnemonic seggs
2795 * @opmincpu 80386
2796 * @opgroup og_prefixes
 *
 * GS segment-override prefix: records the prefix flag, makes GS the
 * effective segment, and continues decoding with the next opcode byte.
2797 */
2798FNIEMOP_DEF(iemOp_seg_GS)
2799{
2800 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
2801 IEMOP_HLP_MIN_386();
2802
2803 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
2804 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
2805
2806 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2807 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2808}
2809
2810
2811/**
2812 * @opcode 0x66
2813 * @opmnemonic opsize
2814 * @openc prefix
2815 * @opmincpu 80386
2816 * @ophints harmless
2817 * @opgroup og_prefixes
 *
 * Operand-size override prefix: flips the effective operand size and
 * continues decoding with the next opcode byte.
2818 */
2819FNIEMOP_DEF(iemOp_op_size)
2820{
2821 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
2822 IEMOP_HLP_MIN_386();
2823
2824 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
2825 iemRecalEffOpSize(pVCpu);
2826
2827 /* For the 4 entry opcode tables, the operand prefix doesn't count
2828 when REPZ or REPNZ are present. */
2829 if (pVCpu->iem.s.idxPrefix == 0)
2830 pVCpu->iem.s.idxPrefix = 1;
2831
2832 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2833 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2834}
2835
2836
2837/**
2838 * @opcode 0x67
2839 * @opmnemonic addrsize
2840 * @openc prefix
2841 * @opmincpu 80386
2842 * @ophints harmless
2843 * @opgroup og_prefixes
 *
 * Address-size override prefix: toggles the effective address mode relative
 * to the default (16<->32, 64->32) and continues decoding with the next byte.
2844 */
2845FNIEMOP_DEF(iemOp_addr_size)
2846{
2847 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
2848 IEMOP_HLP_MIN_386();
2849
2850 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
2851 switch (pVCpu->iem.s.enmDefAddrMode)
2852 {
2853 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2854 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
2855 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2856 default: AssertFailed();
2857 }
2858
2859 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2860 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2861}
2862
2863
2864/**
2865 * @opcode 0x68
 * PUSH Iz - 80186+, default 64-bit operand size in long mode.  The 64-bit
 * variant pushes a sign-extended 32-bit immediate.
2866 */
2867FNIEMOP_DEF(iemOp_push_Iz)
2868{
2869 IEMOP_MNEMONIC(push_Iz, "push Iz");
2870 IEMOP_HLP_MIN_186();
2871 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2872 switch (pVCpu->iem.s.enmEffOpSize)
2873 {
2874 case IEMMODE_16BIT:
2875 {
2876 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2877 IEM_MC_BEGIN(0,0);
2878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2879 IEM_MC_PUSH_U16(u16Imm);
2880 IEM_MC_ADVANCE_RIP_AND_FINISH();
2881 IEM_MC_END();
2882 break;
2883 }
2884
2885 case IEMMODE_32BIT:
2886 {
2887 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2888 IEM_MC_BEGIN(0,0);
2889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2890 IEM_MC_PUSH_U32(u32Imm);
2891 IEM_MC_ADVANCE_RIP_AND_FINISH();
2892 IEM_MC_END();
2893 break;
2894 }
2895
2896 case IEMMODE_64BIT:
2897 {
2898 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2899 IEM_MC_BEGIN(0,0);
2900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2901 IEM_MC_PUSH_U64(u64Imm);
2902 IEM_MC_ADVANCE_RIP_AND_FINISH();
2903 IEM_MC_END();
2904 break;
2905 }
2906
2907 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2908 }
2909}
2910
2911
2912/**
2913 * @opcode 0x69
 * IMUL Gv,Ev,Iz (three-operand form, 80186+).  SF/ZF/AF/PF are declared
 * undefined below; the eflags behavior of the assembly worker is selected
 * per target CPU via IEMTARGETCPU_EFL_BEHAVIOR_SELECT.  The 64-bit form
 * uses a sign-extended 32-bit immediate.
2914 */
2915FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
2916{
2917 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
2918 IEMOP_HLP_MIN_186();
2919 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2920 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2921
2922 switch (pVCpu->iem.s.enmEffOpSize)
2923 {
2924 case IEMMODE_16BIT:
2925 {
2926 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
2927 if (IEM_IS_MODRM_REG_MODE(bRm))
2928 {
2929 /* register operand */
2930 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2931 IEM_MC_BEGIN(3, 1);
2932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2933 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2934 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
2935 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2936 IEM_MC_LOCAL(uint16_t, u16Tmp);
2937
                /* The multiply is done on a local copy which is then written to Gv. */
2938 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2939 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2940 IEM_MC_REF_EFLAGS(pEFlags);
2941 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2942 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2943
2944 IEM_MC_ADVANCE_RIP_AND_FINISH();
2945 IEM_MC_END();
2946 }
2947 else
2948 {
2949 /* memory operand */
2950 IEM_MC_BEGIN(3, 2);
2951 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2952 IEM_MC_ARG(uint16_t, u16Src, 1);
2953 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2954 IEM_MC_LOCAL(uint16_t, u16Tmp);
2955 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2956
            /* cbImm = 2: the immediate follows the ModR/M bytes. */
2957 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2958 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2959 IEM_MC_ASSIGN(u16Src, u16Imm);
2960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2961 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2962 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2963 IEM_MC_REF_EFLAGS(pEFlags);
2964 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2965 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2966
2967 IEM_MC_ADVANCE_RIP_AND_FINISH();
2968 IEM_MC_END();
2969 }
2970 break;
2971 }
2972
2973 case IEMMODE_32BIT:
2974 {
2975 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
2976 if (IEM_IS_MODRM_REG_MODE(bRm))
2977 {
2978 /* register operand */
2979 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2980 IEM_MC_BEGIN(3, 1);
2981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2982 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2983 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
2984 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2985 IEM_MC_LOCAL(uint32_t, u32Tmp);
2986
2987 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2988 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2989 IEM_MC_REF_EFLAGS(pEFlags);
2990 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
2991 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2992
2993 IEM_MC_ADVANCE_RIP_AND_FINISH();
2994 IEM_MC_END();
2995 }
2996 else
2997 {
2998 /* memory operand */
2999 IEM_MC_BEGIN(3, 2);
3000 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3001 IEM_MC_ARG(uint32_t, u32Src, 1);
3002 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3003 IEM_MC_LOCAL(uint32_t, u32Tmp);
3004 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3005
            /* cbImm = 4: the immediate follows the ModR/M bytes. */
3006 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3007 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3008 IEM_MC_ASSIGN(u32Src, u32Imm);
3009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3010 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3011 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
3012 IEM_MC_REF_EFLAGS(pEFlags);
3013 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3014 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3015
3016 IEM_MC_ADVANCE_RIP_AND_FINISH();
3017 IEM_MC_END();
3018 }
3019 break;
3020 }
3021
3022 case IEMMODE_64BIT:
3023 {
3024 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3025 if (IEM_IS_MODRM_REG_MODE(bRm))
3026 {
3027 /* register operand */
3028 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3029 IEM_MC_BEGIN(3, 1);
3030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3031 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3032 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
3033 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3034 IEM_MC_LOCAL(uint64_t, u64Tmp);
3035
3036 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3037 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3038 IEM_MC_REF_EFLAGS(pEFlags);
3039 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3040 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3041
3042 IEM_MC_ADVANCE_RIP_AND_FINISH();
3043 IEM_MC_END();
3044 }
3045 else
3046 {
3047 /* memory operand */
3048 IEM_MC_BEGIN(3, 2);
3049 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3050 IEM_MC_ARG(uint64_t, u64Src, 1);
3051 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3052 IEM_MC_LOCAL(uint64_t, u64Tmp);
3053 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3054
            /* cbImm = 4: a 32-bit immediate follows even for the 64-bit form. */
3055 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3056 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
3057 IEM_MC_ASSIGN_U32_SX_U64(u64Src, u32Imm); /* parameter count for the threaded function for this block. */
3058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3059 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3060 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3061 IEM_MC_REF_EFLAGS(pEFlags);
3062 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3063 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3064
3065 IEM_MC_ADVANCE_RIP_AND_FINISH();
3066 IEM_MC_END();
3067 }
3068 break;
3069 }
3070
3071 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3072 }
3073}
3074
3075
3076/**
3077 * @opcode 0x6a
3078 */
3079FNIEMOP_DEF(iemOp_push_Ib)
3080{
3081 IEMOP_MNEMONIC(push_Ib, "push Ib");
3082 IEMOP_HLP_MIN_186();
3083 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3084 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3085
3086 switch (pVCpu->iem.s.enmEffOpSize)
3087 {
3088 case IEMMODE_16BIT:
3089 IEM_MC_BEGIN(0,0);
3090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3091 IEM_MC_PUSH_U16(i8Imm);
3092 IEM_MC_ADVANCE_RIP_AND_FINISH();
3093 IEM_MC_END();
3094 break;
3095 case IEMMODE_32BIT:
3096 IEM_MC_BEGIN(0,0);
3097 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3098 IEM_MC_PUSH_U32(i8Imm);
3099 IEM_MC_ADVANCE_RIP_AND_FINISH();
3100 IEM_MC_END();
3101 break;
3102 case IEMMODE_64BIT:
3103 IEM_MC_BEGIN(0,0);
3104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3105 IEM_MC_PUSH_U64(i8Imm);
3106 IEM_MC_ADVANCE_RIP_AND_FINISH();
3107 IEM_MC_END();
3108 break;
3109 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3110 }
3111}
3112
3113
3114/**
3115 * @opcode 0x6b
3116 */
3117FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
3118{
3119 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
3120 IEMOP_HLP_MIN_186();
3121 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3122 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3123
3124 switch (pVCpu->iem.s.enmEffOpSize)
3125 {
3126 case IEMMODE_16BIT:
3127 {
3128 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
3129 if (IEM_IS_MODRM_REG_MODE(bRm))
3130 {
3131 /* register operand */
3132 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3133 IEM_MC_BEGIN(3, 1);
3134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3135 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3136 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
3137 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3138 IEM_MC_LOCAL(uint16_t, u16Tmp);
3139
3140 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3141 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
3142 IEM_MC_REF_EFLAGS(pEFlags);
3143 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3144 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3145
3146 IEM_MC_ADVANCE_RIP_AND_FINISH();
3147 IEM_MC_END();
3148 }
3149 else
3150 {
3151 /* memory operand */
3152 IEM_MC_BEGIN(3, 2);
3153 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3154 IEM_MC_ARG(uint16_t, u16Src, 1);
3155 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3156 IEM_MC_LOCAL(uint16_t, u16Tmp);
3157 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3158
3159 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3160 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
3161 IEM_MC_ASSIGN(u16Src, u16Imm);
3162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3163 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3164 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
3165 IEM_MC_REF_EFLAGS(pEFlags);
3166 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3167 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3168
3169 IEM_MC_ADVANCE_RIP_AND_FINISH();
3170 IEM_MC_END();
3171 }
3172 break;
3173 }
3174
3175 case IEMMODE_32BIT:
3176 {
3177 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
3178 if (IEM_IS_MODRM_REG_MODE(bRm))
3179 {
3180 /* register operand */
3181 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3182 IEM_MC_BEGIN(3, 1);
3183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3184 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3185 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
3186 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3187 IEM_MC_LOCAL(uint32_t, u32Tmp);
3188
3189 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3190 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
3191 IEM_MC_REF_EFLAGS(pEFlags);
3192 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3193 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3194
3195 IEM_MC_ADVANCE_RIP_AND_FINISH();
3196 IEM_MC_END();
3197 }
3198 else
3199 {
3200 /* memory operand */
3201 IEM_MC_BEGIN(3, 2);
3202 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3203 IEM_MC_ARG(uint32_t, u32Src, 1);
3204 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3205 IEM_MC_LOCAL(uint32_t, u32Tmp);
3206 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3207
3208 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3209 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
3210 IEM_MC_ASSIGN(u32Src, u32Imm);
3211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3212 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3213 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
3214 IEM_MC_REF_EFLAGS(pEFlags);
3215 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3216 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3217
3218 IEM_MC_ADVANCE_RIP_AND_FINISH();
3219 IEM_MC_END();
3220 }
3221 break;
3222 }
3223
3224 case IEMMODE_64BIT:
3225 {
3226 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3227 if (IEM_IS_MODRM_REG_MODE(bRm))
3228 {
3229 /* register operand */
3230 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3231 IEM_MC_BEGIN(3, 1);
3232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3233 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3234 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
3235 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3236 IEM_MC_LOCAL(uint64_t, u64Tmp);
3237
3238 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3239 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3240 IEM_MC_REF_EFLAGS(pEFlags);
3241 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3242 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3243
3244 IEM_MC_ADVANCE_RIP_AND_FINISH();
3245 IEM_MC_END();
3246 }
3247 else
3248 {
3249 /* memory operand */
3250 IEM_MC_BEGIN(3, 2);
3251 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3252 IEM_MC_ARG(uint64_t, u64Src, 1);
3253 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3254 IEM_MC_LOCAL(uint64_t, u64Tmp);
3255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3256
3257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3258 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the */
3259 IEM_MC_ASSIGN_U8_SX_U64(u64Src, u8Imm); /* parameter count for the threaded function for this block. */
3260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3261 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3262 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3263 IEM_MC_REF_EFLAGS(pEFlags);
3264 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3265 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3266
3267 IEM_MC_ADVANCE_RIP_AND_FINISH();
3268 IEM_MC_END();
3269 }
3270 break;
3271 }
3272
3273 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3274 }
3275}
3276
3277
3278/**
3279 * @opcode 0x6c
3280 */
3281FNIEMOP_DEF(iemOp_insb_Yb_DX)
3282{
3283 IEMOP_HLP_MIN_186();
3284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3285 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3286 {
3287 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
3288 switch (pVCpu->iem.s.enmEffAddrMode)
3289 {
3290 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3291 iemCImpl_rep_ins_op8_addr16, false);
3292 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3293 iemCImpl_rep_ins_op8_addr32, false);
3294 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3295 iemCImpl_rep_ins_op8_addr64, false);
3296 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3297 }
3298 }
3299 else
3300 {
3301 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
3302 switch (pVCpu->iem.s.enmEffAddrMode)
3303 {
3304 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3305 iemCImpl_ins_op8_addr16, false);
3306 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3307 iemCImpl_ins_op8_addr32, false);
3308 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3309 iemCImpl_ins_op8_addr64, false);
3310 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3311 }
3312 }
3313}
3314
3315
3316/**
3317 * @opcode 0x6d
3318 */
3319FNIEMOP_DEF(iemOp_inswd_Yv_DX)
3320{
3321 IEMOP_HLP_MIN_186();
3322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3323 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3324 {
3325 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
3326 switch (pVCpu->iem.s.enmEffOpSize)
3327 {
3328 case IEMMODE_16BIT:
3329 switch (pVCpu->iem.s.enmEffAddrMode)
3330 {
3331 case IEMMODE_16BIT:
3332 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3333 iemCImpl_rep_ins_op16_addr16, false);
3334 case IEMMODE_32BIT:
3335 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3336 iemCImpl_rep_ins_op16_addr32, false);
3337 case IEMMODE_64BIT:
3338 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3339 iemCImpl_rep_ins_op16_addr64, false);
3340 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3341 }
3342 break;
3343 case IEMMODE_64BIT:
3344 case IEMMODE_32BIT:
3345 switch (pVCpu->iem.s.enmEffAddrMode)
3346 {
3347 case IEMMODE_16BIT:
3348 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3349 iemCImpl_rep_ins_op32_addr16, false);
3350 case IEMMODE_32BIT:
3351 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3352 iemCImpl_rep_ins_op32_addr32, false);
3353 case IEMMODE_64BIT:
3354 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3355 iemCImpl_rep_ins_op32_addr64, false);
3356 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3357 }
3358 break;
3359 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3360 }
3361 }
3362 else
3363 {
3364 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
3365 switch (pVCpu->iem.s.enmEffOpSize)
3366 {
3367 case IEMMODE_16BIT:
3368 switch (pVCpu->iem.s.enmEffAddrMode)
3369 {
3370 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3371 iemCImpl_ins_op16_addr16, false);
3372 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3373 iemCImpl_ins_op16_addr32, false);
3374 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3375 iemCImpl_ins_op16_addr64, false);
3376 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3377 }
3378 break;
3379 case IEMMODE_64BIT:
3380 case IEMMODE_32BIT:
3381 switch (pVCpu->iem.s.enmEffAddrMode)
3382 {
3383 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3384 iemCImpl_ins_op32_addr16, false);
3385 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3386 iemCImpl_ins_op32_addr32, false);
3387 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3388 iemCImpl_ins_op32_addr64, false);
3389 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3390 }
3391 break;
3392 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3393 }
3394 }
3395}
3396
3397
3398/**
3399 * @opcode 0x6e
3400 */
3401FNIEMOP_DEF(iemOp_outsb_Yb_DX)
3402{
3403 IEMOP_HLP_MIN_186();
3404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3405 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3406 {
3407 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
3408 switch (pVCpu->iem.s.enmEffAddrMode)
3409 {
3410 case IEMMODE_16BIT:
3411 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3412 iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3413 case IEMMODE_32BIT:
3414 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3415 iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3416 case IEMMODE_64BIT:
3417 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3418 iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3419 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3420 }
3421 }
3422 else
3423 {
3424 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
3425 switch (pVCpu->iem.s.enmEffAddrMode)
3426 {
3427 case IEMMODE_16BIT:
3428 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3429 iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3430 case IEMMODE_32BIT:
3431 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3432 iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3433 case IEMMODE_64BIT:
3434 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3435 iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3436 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3437 }
3438 }
3439}
3440
3441
3442/**
3443 * @opcode 0x6f
3444 */
3445FNIEMOP_DEF(iemOp_outswd_Yv_DX)
3446{
3447 IEMOP_HLP_MIN_186();
3448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3449 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3450 {
3451 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
3452 switch (pVCpu->iem.s.enmEffOpSize)
3453 {
3454 case IEMMODE_16BIT:
3455 switch (pVCpu->iem.s.enmEffAddrMode)
3456 {
3457 case IEMMODE_16BIT:
3458 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3459 iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3460 case IEMMODE_32BIT:
3461 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3462 iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3463 case IEMMODE_64BIT:
3464 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3465 iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3466 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3467 }
3468 break;
3469 case IEMMODE_64BIT:
3470 case IEMMODE_32BIT:
3471 switch (pVCpu->iem.s.enmEffAddrMode)
3472 {
3473 case IEMMODE_16BIT:
3474 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3475 iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3476 case IEMMODE_32BIT:
3477 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3478 iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3479 case IEMMODE_64BIT:
3480 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3481 iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3482 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3483 }
3484 break;
3485 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3486 }
3487 }
3488 else
3489 {
3490 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
3491 switch (pVCpu->iem.s.enmEffOpSize)
3492 {
3493 case IEMMODE_16BIT:
3494 switch (pVCpu->iem.s.enmEffAddrMode)
3495 {
3496 case IEMMODE_16BIT:
3497 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3498 iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3499 case IEMMODE_32BIT:
3500 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3501 iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3502 case IEMMODE_64BIT:
3503 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3504 iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3505 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3506 }
3507 break;
3508 case IEMMODE_64BIT:
3509 case IEMMODE_32BIT:
3510 switch (pVCpu->iem.s.enmEffAddrMode)
3511 {
3512 case IEMMODE_16BIT:
3513 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3514 iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3515 case IEMMODE_32BIT:
3516 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3517 iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3518 case IEMMODE_64BIT:
3519 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3520 iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3521 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3522 }
3523 break;
3524 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3525 }
3526 }
3527}
3528
3529
3530/**
3531 * @opcode 0x70
3532 */
3533FNIEMOP_DEF(iemOp_jo_Jb)
3534{
3535 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
3536 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3537 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3538
3539 IEM_MC_BEGIN(0, 0);
3540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3541 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3542 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3543 } IEM_MC_ELSE() {
3544 IEM_MC_ADVANCE_RIP_AND_FINISH();
3545 } IEM_MC_ENDIF();
3546 IEM_MC_END();
3547}
3548
3549
3550/**
3551 * @opcode 0x71
3552 */
3553FNIEMOP_DEF(iemOp_jno_Jb)
3554{
3555 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
3556 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3557 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3558
3559 IEM_MC_BEGIN(0, 0);
3560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3561 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3562 IEM_MC_ADVANCE_RIP_AND_FINISH();
3563 } IEM_MC_ELSE() {
3564 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3565 } IEM_MC_ENDIF();
3566 IEM_MC_END();
3567}
3568
3569/**
3570 * @opcode 0x72
3571 */
3572FNIEMOP_DEF(iemOp_jc_Jb)
3573{
3574 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
3575 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3576 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3577
3578 IEM_MC_BEGIN(0, 0);
3579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3580 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3581 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3582 } IEM_MC_ELSE() {
3583 IEM_MC_ADVANCE_RIP_AND_FINISH();
3584 } IEM_MC_ENDIF();
3585 IEM_MC_END();
3586}
3587
3588
3589/**
3590 * @opcode 0x73
3591 */
3592FNIEMOP_DEF(iemOp_jnc_Jb)
3593{
3594 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
3595 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3596 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3597
3598 IEM_MC_BEGIN(0, 0);
3599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3600 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3601 IEM_MC_ADVANCE_RIP_AND_FINISH();
3602 } IEM_MC_ELSE() {
3603 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3604 } IEM_MC_ENDIF();
3605 IEM_MC_END();
3606}
3607
3608
3609/**
3610 * @opcode 0x74
3611 */
3612FNIEMOP_DEF(iemOp_je_Jb)
3613{
3614 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
3615 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3616 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3617
3618 IEM_MC_BEGIN(0, 0);
3619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3620 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3621 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3622 } IEM_MC_ELSE() {
3623 IEM_MC_ADVANCE_RIP_AND_FINISH();
3624 } IEM_MC_ENDIF();
3625 IEM_MC_END();
3626}
3627
3628
3629/**
3630 * @opcode 0x75
3631 */
3632FNIEMOP_DEF(iemOp_jne_Jb)
3633{
3634 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
3635 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3636 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3637
3638 IEM_MC_BEGIN(0, 0);
3639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3640 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3641 IEM_MC_ADVANCE_RIP_AND_FINISH();
3642 } IEM_MC_ELSE() {
3643 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3644 } IEM_MC_ENDIF();
3645 IEM_MC_END();
3646}
3647
3648
3649/**
3650 * @opcode 0x76
3651 */
3652FNIEMOP_DEF(iemOp_jbe_Jb)
3653{
3654 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
3655 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3656 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3657
3658 IEM_MC_BEGIN(0, 0);
3659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3660 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3661 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3662 } IEM_MC_ELSE() {
3663 IEM_MC_ADVANCE_RIP_AND_FINISH();
3664 } IEM_MC_ENDIF();
3665 IEM_MC_END();
3666}
3667
3668
3669/**
3670 * @opcode 0x77
3671 */
3672FNIEMOP_DEF(iemOp_jnbe_Jb)
3673{
3674 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
3675 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3676 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3677
3678 IEM_MC_BEGIN(0, 0);
3679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3680 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3681 IEM_MC_ADVANCE_RIP_AND_FINISH();
3682 } IEM_MC_ELSE() {
3683 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3684 } IEM_MC_ENDIF();
3685 IEM_MC_END();
3686}
3687
3688
3689/**
3690 * @opcode 0x78
3691 */
3692FNIEMOP_DEF(iemOp_js_Jb)
3693{
3694 IEMOP_MNEMONIC(js_Jb, "js Jb");
3695 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3696 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3697
3698 IEM_MC_BEGIN(0, 0);
3699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3700 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3701 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3702 } IEM_MC_ELSE() {
3703 IEM_MC_ADVANCE_RIP_AND_FINISH();
3704 } IEM_MC_ENDIF();
3705 IEM_MC_END();
3706}
3707
3708
3709/**
3710 * @opcode 0x79
3711 */
3712FNIEMOP_DEF(iemOp_jns_Jb)
3713{
3714 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
3715 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3716 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3717
3718 IEM_MC_BEGIN(0, 0);
3719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3720 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3721 IEM_MC_ADVANCE_RIP_AND_FINISH();
3722 } IEM_MC_ELSE() {
3723 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3724 } IEM_MC_ENDIF();
3725 IEM_MC_END();
3726}
3727
3728
3729/**
3730 * @opcode 0x7a
3731 */
3732FNIEMOP_DEF(iemOp_jp_Jb)
3733{
3734 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
3735 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3736 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3737
3738 IEM_MC_BEGIN(0, 0);
3739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3740 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3741 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3742 } IEM_MC_ELSE() {
3743 IEM_MC_ADVANCE_RIP_AND_FINISH();
3744 } IEM_MC_ENDIF();
3745 IEM_MC_END();
3746}
3747
3748
3749/**
3750 * @opcode 0x7b
3751 */
3752FNIEMOP_DEF(iemOp_jnp_Jb)
3753{
3754 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
3755 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3756 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3757
3758 IEM_MC_BEGIN(0, 0);
3759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3760 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3761 IEM_MC_ADVANCE_RIP_AND_FINISH();
3762 } IEM_MC_ELSE() {
3763 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3764 } IEM_MC_ENDIF();
3765 IEM_MC_END();
3766}
3767
3768
3769/**
3770 * @opcode 0x7c
3771 */
3772FNIEMOP_DEF(iemOp_jl_Jb)
3773{
3774 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
3775 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3776 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3777
3778 IEM_MC_BEGIN(0, 0);
3779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3780 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3781 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3782 } IEM_MC_ELSE() {
3783 IEM_MC_ADVANCE_RIP_AND_FINISH();
3784 } IEM_MC_ENDIF();
3785 IEM_MC_END();
3786}
3787
3788
3789/**
3790 * @opcode 0x7d
3791 */
3792FNIEMOP_DEF(iemOp_jnl_Jb)
3793{
3794 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
3795 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3796 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3797
3798 IEM_MC_BEGIN(0, 0);
3799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3800 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3801 IEM_MC_ADVANCE_RIP_AND_FINISH();
3802 } IEM_MC_ELSE() {
3803 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3804 } IEM_MC_ENDIF();
3805 IEM_MC_END();
3806}
3807
3808
3809/**
3810 * @opcode 0x7e
3811 */
3812FNIEMOP_DEF(iemOp_jle_Jb)
3813{
3814 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
3815 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3816 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3817
3818 IEM_MC_BEGIN(0, 0);
3819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3820 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3821 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3822 } IEM_MC_ELSE() {
3823 IEM_MC_ADVANCE_RIP_AND_FINISH();
3824 } IEM_MC_ENDIF();
3825 IEM_MC_END();
3826}
3827
3828
3829/**
3830 * @opcode 0x7f
3831 */
3832FNIEMOP_DEF(iemOp_jnle_Jb)
3833{
3834 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
3835 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3836 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3837
3838 IEM_MC_BEGIN(0, 0);
3839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3840 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3841 IEM_MC_ADVANCE_RIP_AND_FINISH();
3842 } IEM_MC_ELSE() {
3843 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3844 } IEM_MC_ENDIF();
3845 IEM_MC_END();
3846}
3847
3848
3849/**
3850 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
3851 * iemOp_Grp1_Eb_Ib_80.
3852 */
3853#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
3854 if (IEM_IS_MODRM_REG_MODE(bRm)) \
3855 { \
3856 /* register target */ \
3857 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3858 IEM_MC_BEGIN(3, 0); \
3859 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
3860 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3861 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3862 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
3863 \
3864 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3865 IEM_MC_REF_EFLAGS(pEFlags); \
3866 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3867 \
3868 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3869 IEM_MC_END(); \
3870 } \
3871 else \
3872 { \
3873 /* memory target */ \
3874 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
3875 { \
3876 IEM_MC_BEGIN(3, 3); \
3877 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3878 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3879 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3880 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
3881 \
3882 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
3883 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3884 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3885 IEMOP_HLP_DONE_DECODING(); \
3886 \
3887 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
3888 IEM_MC_FETCH_EFLAGS(EFlags); \
3889 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3890 \
3891 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
3892 IEM_MC_COMMIT_EFLAGS(EFlags); \
3893 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3894 IEM_MC_END(); \
3895 } \
3896 else \
3897 { \
3898 (void)0
3899
/**
 * Tail for IEMOP_BODY_BINARY_Eb_Ib_RW handling the LOCK-prefixed memory
 * target with the atomic worker @a a_fnLockedU8.
 *
 * Completes the open 'else { ... }' left by IEMOP_BODY_BINARY_Eb_Ib_RW,
 * closing its two dangling braces.  Same RMW sequence as the unlocked path,
 * just with the locked worker.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
3923
/**
 * Body for group 1 instructions that only read the destination (CMP Eb,Ib):
 * register path plus read-only memory path for worker @a a_fnNormalU8.
 *
 * The memory operand is mapped read-only and only EFLAGS are committed.
 * Like the _RW variant this ends inside an open 'else { ... }' for the
 * (invalid) LOCK-prefix case and MUST be followed by
 * IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK to close the braces.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3); \
            IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
3970
/**
 * Tail for IEMOP_BODY_BINARY_Eb_Ib_RO: raises \#UD for a LOCK prefix on an
 * instruction that has no locked form, and closes the two dangling braces
 * left open by the _RO body.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
3977
3978
3979
3980/**
3981 * @opmaps grp1_80,grp1_83
3982 * @opcode /0
3983 */
3984FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
3985{
3986 IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
3987 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_add_u8);
3988 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
3989}
3990
3991
3992/**
3993 * @opmaps grp1_80,grp1_83
3994 * @opcode /1
3995 */
3996FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
3997{
3998 IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
3999 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_or_u8);
4000 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
4001}
4002
4003
4004/**
4005 * @opmaps grp1_80,grp1_83
4006 * @opcode /2
4007 */
4008FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
4009{
4010 IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
4011 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_adc_u8);
4012 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
4013}
4014
4015
4016/**
4017 * @opmaps grp1_80,grp1_83
4018 * @opcode /3
4019 */
4020FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
4021{
4022 IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
4023 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sbb_u8);
4024 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
4025}
4026
4027
4028/**
4029 * @opmaps grp1_80,grp1_83
4030 * @opcode /4
4031 */
4032FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
4033{
4034 IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
4035 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_and_u8);
4036 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
4037}
4038
4039
4040/**
4041 * @opmaps grp1_80,grp1_83
4042 * @opcode /5
4043 */
4044FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
4045{
4046 IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
4047 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sub_u8);
4048 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
4049}
4050
4051
4052/**
4053 * @opmaps grp1_80,grp1_83
4054 * @opcode /6
4055 */
4056FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
4057{
4058 IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
4059 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_xor_u8);
4060 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
4061}
4062
4063
4064/**
4065 * @opmaps grp1_80,grp1_83
4066 * @opcode /7
4067 */
4068FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
4069{
4070 IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
4071 IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
4072 IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
4073}
4074
4075
4076/**
4077 * @opcode 0x80
4078 */
4079FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
4080{
4081 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4082 switch (IEM_GET_MODRM_REG_8(bRm))
4083 {
4084 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
4085 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib, bRm);
4086 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
4087 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
4088 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
4089 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
4090 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
4091 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
4092 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4093 }
4094}
4095
4096
/**
 * Body for a group 1 binary operator.
 *
 * Expands the register-target and non-LOCK memory-target forms of a group 1
 * instruction taking a Z-sized immediate (word/dword, or a dword
 * sign-extended to qword in 64-bit mode), read-modify-write variant.
 *
 * Deliberately unbalanced: it ends inside an open 'else' (the LOCK-prefixed
 * memory path), which IEMOP_BODY_BINARY_Ev_Iz_LOCKED below closes, so the
 * two macros must always be invoked back to back.
 *
 * NOTE(review): the 64-bit memory case calls IEMOP_HLP_DONE_DECODING()
 * before IEM_MC_ASSIGN() while the 16/32-bit cases do it the other way
 * round; looks harmless but is inconsistent - confirm.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t *,  pu16Dst,            0); \
                    IEM_MC_ARG(uint16_t,    u16Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t *,  pu32Dst,            0); \
                    IEM_MC_ARG(uint32_t,    u32Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t *,  pu64Dst,            0); \
                    IEM_MC_ARG(uint64_t,    u64Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* This must be a separate macro due to parsing restrictions in IEMAllInstPython.py.
   It supplies the LOCK-prefixed memory path and closes the 'else' branch left
   open by IEMOP_BODY_BINARY_Ev_Iz_RW above, so the two must be used together. */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t *,  pu16Dst,            0); \
                    IEM_MC_ARG(uint16_t,    u16Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t *,  pu32Dst,            0); \
                    IEM_MC_ARG(uint32_t,    u32Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t *,  pu64Dst,            0); \
                    IEM_MC_ARG(uint64_t,    u64Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4330
/* Read-only version (CMP): maps the memory operand read-only and never writes
   it back; a LOCK prefix on the memory form raises an invalid-lock-prefix
   error instead of being honoured.  This macro is self-contained (no _LOCKED
   companion needed). */
#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t const *,    pu16Dst,            0); \
                    IEM_MC_ARG(uint16_t,            u16Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,           GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,           bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t const *,    pu32Dst,            0); \
                    IEM_MC_ARG(uint32_t,            u32Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,           GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,           bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t const *,    pu64Dst,            0); \
                    IEM_MC_ARG(uint64_t,            u64Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,           GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,           bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4484
4485
/**
 * @opmaps grp1_81
 * @opcode /0
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
    /* _RW emits register + unlocked memory forms and leaves the LOCK-prefix
       'else' open; _LOCKED closes it - keep the pair back to back. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
4496
4497
/**
 * @opmaps grp1_81
 * @opcode /1
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
    /* Unlocked forms first; _LOCKED closes the LOCK branch opened by _RW. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
4508
4509
/**
 * @opmaps grp1_81
 * @opcode /2
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
    /* Unlocked forms first; _LOCKED closes the LOCK branch opened by _RW. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
4520
4521
/**
 * @opmaps grp1_81
 * @opcode /3
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
    /* Unlocked forms first; _LOCKED closes the LOCK branch opened by _RW. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
4532
4533
/**
 * @opmaps grp1_81
 * @opcode /4
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
    /* Unlocked forms first; _LOCKED closes the LOCK branch opened by _RW. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
4544
4545
/**
 * @opmaps grp1_81
 * @opcode /5
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
    /* Unlocked forms first; _LOCKED closes the LOCK branch opened by _RW. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
4556
4557
/**
 * @opmaps grp1_81
 * @opcode /6
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
    /* Unlocked forms first; _LOCKED closes the LOCK branch opened by _RW. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
4568
4569
/**
 * @opmaps grp1_81
 * @opcode /7
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
    /* CMP only reads its operands: the read-only body maps memory RO and
       raises an invalid-lock-prefix error for LOCK-prefixed memory forms,
       so no _LOCKED companion is needed. */
    IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
4579
4580
/**
 * @opcode 0x81
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    /* Immediate group 1, word/dword/qword operands with Z-sized immediate:
       the ModR/M reg field selects the operation. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4600
4601
/**
 * @opcode 0x82
 * @opmnemonic grp1_82
 * @opgroup og_groups
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    /* 0x82 is an alias of 0x80 (group 1 Eb,Ib) that is only valid outside
       64-bit mode; after the mode check it simply defers to the 0x80 body. */
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
4612
4613
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Ev_Ib.
 *
 * Expands the register-target and non-LOCK memory-target forms; the byte
 * immediate is sign-extended ((int8_t)u8Imm) to the effective operand size.
 *
 * Deliberately unbalanced: it ends inside an open 'else' (the LOCK-prefixed
 * memory path) which IEMOP_BODY_BINARY_Ev_Ib_LOCKED below closes, so the two
 * macros must always be invoked back to back.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t *,  pu16Dst,            0); \
                    IEM_MC_ARG(uint16_t,    u16Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t *,  pu32Dst,            0); \
                    IEM_MC_ARG(uint32_t,    u32Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t *,  pu64Dst,            0); \
                    IEM_MC_ARG(uint64_t,    u64Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py.
   Supplies the LOCK-prefixed memory path and closes the 'else' branch left
   open by IEMOP_BODY_BINARY_Ev_Ib_RW above; always use the two together. */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t *,  pu16Dst,            0); \
                    IEM_MC_ARG(uint16_t,    u16Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t *,  pu32Dst,            0); \
                    IEM_MC_ARG(uint32_t,    u32Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t *,  pu64Dst,            0); \
                    IEM_MC_ARG(uint64_t,    u64Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4850
/* Read-only variant (CMP): maps the memory operand read-only and never writes
   it back; a LOCK prefix on the memory form raises an invalid-lock-prefix
   error.  Self-contained - no _LOCKED companion needed. */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t const *,    pu16Dst,            0); \
                    IEM_MC_ARG(uint16_t,            u16Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,           GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,           bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t const *,    pu32Dst,            0); \
                    IEM_MC_ARG(uint32_t,            u32Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,           GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,           bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t const *,    pu64Dst,            0); \
                    IEM_MC_ARG(uint64_t,            u64Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,           GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,           bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
5006
/**
 * @opmaps grp1_83
 * @opcode /0
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
    /* _RW emits register + unlocked memory forms and leaves the LOCK-prefix
       'else' open; _LOCKED closes it - keep the pair back to back. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
5017
5018
/**
 * @opmaps grp1_83
 * @opcode /1
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
    /* Unlocked forms first; _LOCKED closes the LOCK branch opened by _RW. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
5029
5030
/**
 * @opmaps grp1_83
 * @opcode /2
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
    /* Unlocked forms first; _LOCKED closes the LOCK branch opened by _RW. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
5041
5042
/**
 * @opmaps grp1_83
 * @opcode /3
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
    /* Unlocked forms first; _LOCKED closes the LOCK branch opened by _RW. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
5053
5054
/**
 * @opmaps grp1_83
 * @opcode /4
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
    /* Unlocked forms first; _LOCKED closes the LOCK branch opened by _RW. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
5065
5066
5067/**
5068 * @opmaps grp1_83
5069 * @opcode /5
5070 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
    /* 0x83 /5: sign-extended imm8 subtracted from Ev; unlocked and locked bodies. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
5077
5078
5079/**
5080 * @opmaps grp1_83
5081 * @opcode /6
5082 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
    /* 0x83 /6: sign-extended imm8 XOR'ed into Ev; unlocked and locked bodies. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
5089
5090
5091/**
5092 * @opmaps grp1_83
5093 * @opcode /7
5094 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
    /* 0x83 /7: compare only writes EFLAGS, so the destination is read-only
       and there is no LOCK-prefixed form (LOCK raises #UD via the RO body). */
    IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
5100
5101
5102/**
5103 * @opcode 0x83
5104 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    /* Group 1 dispatcher: the 3-bit modrm.reg field selects the operation,
       so the switch below is exhaustive by construction. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5124
5125
5126/**
5127 * @opcode 0x84
5128 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    /* AF is architecturally undefined after TEST. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* TEST only updates EFLAGS (destination is read-only), so there is no
       locked variant - the NO_LOCK body rejects a LOCK prefix. */
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
5136
5137
5138/**
5139 * @opcode 0x85
5140 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    /* AF is architecturally undefined after TEST; destination is read-only. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64);
}
5147
5148
5149/**
5150 * @opcode 0x86
5151 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register form: swap the two byte registers via two temporaries. */
        IEM_MC_BEGIN(0, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* XCHG with a memory operand is implicitly locked; the unlocked
           worker is only used when the disregard-lock mode is active.
           The general register is updated only after the memory write has
           been committed, so a failing commit leaves it unchanged. */
        IEM_MC_BEGIN(2, 4);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);
        IEM_MC_LOCAL(uint8_t, uTmpReg);
        IEM_MC_ARG(uint8_t *,           pu8Mem,          0);
        IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP_U8_RW(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
        else
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Mem, bUnmapInfo);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5202
5203
5204/**
5205 * @opcode 0x87
5206 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register form: swap the two registers via temporaries; the U32
           stores implicitly zero the high dword in 64-bit mode. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Implicitly locked unless the disregard-lock mode is active; the
           general register is written only after the memory commit, so a
           failing commit leaves the register unchanged. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 4);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_LOCAL(uint16_t, uTmpReg);
                IEM_MC_ARG(uint16_t *,           pu16Mem,           0);
                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg,  1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U16_RW(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 4);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_LOCAL(uint32_t, uTmpReg);
                IEM_MC_ARG(uint32_t *,           pu32Mem,           0);
                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg,  1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U32_RW(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                /* NOTE(review): pu32Reg now references the local uTmpReg (not a
                   guest register) and the STORE_GREG_U32 above already clears
                   the high half - this looks like a leftover from before the
                   conversion to mapped access; confirm it is still needed. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 4);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_LOCAL(uint64_t, uTmpReg);
                IEM_MC_ARG(uint64_t *,           pu64Mem,           0);
                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg,  1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U64_RW(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5348
5349
5350/**
5351 * @opcode 0x88
5352 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register to register: byte copy reg -> rm. */
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5389
5390
5391/**
5392 * @opcode 0x89
5393 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    /* Register destination: copy reg -> rm at the effective operand size;
       the U32 store implicitly zeroes the high dword in 64-bit mode. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5487
5488
5489/**
5490 * @opcode 0x8a
5491 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register to register: byte copy rm -> reg. */
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5527
5528
5529/**
5530 * @opcode 0x8b
5531 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    /* Copy rm -> reg at the effective operand size; the U32 store
       implicitly zeroes the high dword in 64-bit mode. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5625
5626
5627/**
5628 * opcode 0x63
5629 * @todo Table fixme
5630 */
5631FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
5632{
5633 if (!IEM_IS_64BIT_CODE(pVCpu))
5634 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
5635 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
5636 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
5637 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
5638}
5639
5640
5641/**
5642 * @opcode 0x8c
5643 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if (iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* 32/64-bit destinations use the zero-extending selector fetches. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t,  u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5719
5720
5721
5722
5723/**
5724 * @opcode 0x8d
5725 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */

    /* LEA stores the effective address itself, truncated to the effective
       operand size; no memory access is performed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5772
5773
5774/**
5775 * @opcode 0x8e
5776 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    /** @todo r=bird: What does 8086 do here wrt CS? */
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    /* Loading SS/DS/ES/CS in 32-bit code can change the execution mode, so
       the CIMPL call is flagged with IEM_CIMPL_F_MODE in that case. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        if (iSRegArg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
            IEM_MC_CALL_CIMPL_2(                0, iemCImpl_load_SReg, iSRegArg, u16Value);
        else
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE, iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        if (iSRegArg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
            IEM_MC_CALL_CIMPL_2(                0, iemCImpl_load_SReg, iSRegArg, u16Value);
        else
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE, iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
}
5836
5837
5838/** Opcode 0x8f /0. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    /* The 'N << 8' argument to IEM_MC_CALC_RM_EFF_ADDR biases rSP by the
       operand size, implementing the pre-incremented-RSP rule above.
       NOTE(review): there are no break statements after IEM_MC_END() below;
       this presumably relies on IEM_MC_CALL_CIMPL_2 returning from the
       function - confirm against the macro definition. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
        }

        case IEMMODE_32BIT:
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
        }

        case IEMMODE_64BIT:
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
5975
5976
5977/**
5978 * @opcode 0x8f
5979 */
FNIEMOP_DEF(iemOp_Grp1A__xop)
{
    /*
     * AMD has defined /1 thru /7 as XOP prefix.  The prefix is similar to the
     * three byte VEX prefix, except that the mmmmm field cannot have the values
     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    IEMOP_MNEMONIC(xop, "xop");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
    {
        /** @todo Test when exctly the XOP conformance checks kick in during
         * instruction decoding and fetching (using \#PF). */
        uint8_t bXop2;   IEM_OPCODE_GET_NEXT_U8(&bXop2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* Like VEX, XOP is invalid when combined with 66/F2/F3/LOCK/REX. */
        if (   (  pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            /* Decode the inverted R/X/B bits from the first payload byte and
               the W/vvvv/L/pp fields from the second, VEX3-style. */
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
            if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bXop2 & 0x3;

            /** @todo XOP: Just use new tables and decoders. */
            switch (bRm & 0x1f)
            {
                case 8: /* xop opcode map 8. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 9: /* xop opcode map 9. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 10: /* xop opcode map 10. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        else
            Log(("XOP: Invalid prefix mix!\n"));
    }
    else
        Log(("XOP: XOP support disabled!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
6039
6040
6041/**
6042 * Common 'xchg reg,rAX' helper.
6043 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    /* Worker for opcodes 0x90..0x97: exchange rAX with the register selected
       by the low opcode bits, extended to r8..r15 by REX.B. */
    iReg |= pVCpu->iem.s.uRexB;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg,         u16Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg,         u32Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg,         u64Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6091
6092
6093/**
6094 * @opcode 0x90
6095 */
6096FNIEMOP_DEF(iemOp_nop)
6097{
6098 /* R8/R8D and RAX/EAX can be exchanged. */
6099 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
6100 {
6101 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
6102 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
6103 }
6104
6105 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
6106 {
6107 IEMOP_MNEMONIC(pause, "pause");
6108 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
6109 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
6110 if (!IEM_IS_IN_GUEST(pVCpu))
6111 { /* probable */ }
6112#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6113 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
6114 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmx_pause);
6115#endif
6116#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
6117 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
6118 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_svm_pause);
6119#endif
6120 }
6121 else
6122 IEMOP_MNEMONIC(nop, "nop");
6123 /** @todo testcase: lock nop; lock pause */
6124 IEM_MC_BEGIN(0, 0);
6125 IEMOP_HLP_DONE_DECODING();
6126 IEM_MC_ADVANCE_RIP_AND_FINISH();
6127 IEM_MC_END();
6128}
6129
6130
6131/**
6132 * @opcode 0x91
6133 */
6134FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
6135{
6136 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
6137 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
6138}
6139
6140
6141/**
6142 * @opcode 0x92
6143 */
6144FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
6145{
6146 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
6147 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
6148}
6149
6150
6151/**
6152 * @opcode 0x93
6153 */
6154FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
6155{
6156 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
6157 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
6158}
6159
6160
6161/**
6162 * @opcode 0x94
6163 */
6164FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6165{
6166 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6167 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6168}
6169
6170
6171/**
6172 * @opcode 0x95
6173 */
6174FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
6175{
6176 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
6177 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
6178}
6179
6180
6181/**
6182 * @opcode 0x96
6183 */
6184FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
6185{
6186 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
6187 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
6188}
6189
6190
6191/**
6192 * @opcode 0x97
6193 */
6194FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
6195{
6196 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
6197 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
6198}
6199
6200
6201/**
6202 * @opcode 0x98
6203 */
6204FNIEMOP_DEF(iemOp_cbw)
6205{
6206 switch (pVCpu->iem.s.enmEffOpSize)
6207 {
6208 case IEMMODE_16BIT:
6209 IEMOP_MNEMONIC(cbw, "cbw");
6210 IEM_MC_BEGIN(0, 1);
6211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6212 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
6213 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
6214 } IEM_MC_ELSE() {
6215 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
6216 } IEM_MC_ENDIF();
6217 IEM_MC_ADVANCE_RIP_AND_FINISH();
6218 IEM_MC_END();
6219 break;
6220
6221 case IEMMODE_32BIT:
6222 IEMOP_MNEMONIC(cwde, "cwde");
6223 IEM_MC_BEGIN(0, 1);
6224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6225 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6226 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
6227 } IEM_MC_ELSE() {
6228 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
6229 } IEM_MC_ENDIF();
6230 IEM_MC_ADVANCE_RIP_AND_FINISH();
6231 IEM_MC_END();
6232 break;
6233
6234 case IEMMODE_64BIT:
6235 IEMOP_MNEMONIC(cdqe, "cdqe");
6236 IEM_MC_BEGIN(0, 1);
6237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6238 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6239 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
6240 } IEM_MC_ELSE() {
6241 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
6242 } IEM_MC_ENDIF();
6243 IEM_MC_ADVANCE_RIP_AND_FINISH();
6244 IEM_MC_END();
6245 break;
6246
6247 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6248 }
6249}
6250
6251
6252/**
6253 * @opcode 0x99
6254 */
6255FNIEMOP_DEF(iemOp_cwd)
6256{
6257 switch (pVCpu->iem.s.enmEffOpSize)
6258 {
6259 case IEMMODE_16BIT:
6260 IEMOP_MNEMONIC(cwd, "cwd");
6261 IEM_MC_BEGIN(0, 1);
6262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6263 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6264 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
6265 } IEM_MC_ELSE() {
6266 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
6267 } IEM_MC_ENDIF();
6268 IEM_MC_ADVANCE_RIP_AND_FINISH();
6269 IEM_MC_END();
6270 break;
6271
6272 case IEMMODE_32BIT:
6273 IEMOP_MNEMONIC(cdq, "cdq");
6274 IEM_MC_BEGIN(0, 1);
6275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6276 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6277 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
6278 } IEM_MC_ELSE() {
6279 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
6280 } IEM_MC_ENDIF();
6281 IEM_MC_ADVANCE_RIP_AND_FINISH();
6282 IEM_MC_END();
6283 break;
6284
6285 case IEMMODE_64BIT:
6286 IEMOP_MNEMONIC(cqo, "cqo");
6287 IEM_MC_BEGIN(0, 1);
6288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6289 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
6290 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
6291 } IEM_MC_ELSE() {
6292 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
6293 } IEM_MC_ENDIF();
6294 IEM_MC_ADVANCE_RIP_AND_FINISH();
6295 IEM_MC_END();
6296 break;
6297
6298 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6299 }
6300}
6301
6302
6303/**
6304 * @opcode 0x9a
6305 */
6306FNIEMOP_DEF(iemOp_call_Ap)
6307{
6308 IEMOP_MNEMONIC(call_Ap, "call Ap");
6309 IEMOP_HLP_NO_64BIT();
6310
6311 /* Decode the far pointer address and pass it on to the far call C implementation. */
6312 uint32_t off32Seg;
6313 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
6314 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
6315 else
6316 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
6317 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
6318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6319 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
6320 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
6321 iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
6322}
6323
6324
/** Opcode 0x9b. (aka fwait)
 * Checks for pending x87 exceptions / device-not-available conditions and
 * otherwise does nothing but advance RIP. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6336
6337
6338/**
6339 * @opcode 0x9c
6340 */
6341FNIEMOP_DEF(iemOp_pushf_Fv)
6342{
6343 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
6344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6345 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6346 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
6347}
6348
6349
6350/**
6351 * @opcode 0x9d
6352 */
6353FNIEMOP_DEF(iemOp_popf_Fv)
6354{
6355 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
6356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6357 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6358 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
6359 iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
6360}
6361
6362
6363/**
6364 * @opcode 0x9e
6365 */
6366FNIEMOP_DEF(iemOp_sahf)
6367{
6368 IEMOP_MNEMONIC(sahf, "sahf");
6369 if ( IEM_IS_64BIT_CODE(pVCpu)
6370 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6371 IEMOP_RAISE_INVALID_OPCODE_RET();
6372 IEM_MC_BEGIN(0, 2);
6373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6374 IEM_MC_LOCAL(uint32_t, u32Flags);
6375 IEM_MC_LOCAL(uint32_t, EFlags);
6376 IEM_MC_FETCH_EFLAGS(EFlags);
6377 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
6378 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6379 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
6380 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
6381 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
6382 IEM_MC_COMMIT_EFLAGS(EFlags);
6383 IEM_MC_ADVANCE_RIP_AND_FINISH();
6384 IEM_MC_END();
6385}
6386
6387
6388/**
6389 * @opcode 0x9f
6390 */
6391FNIEMOP_DEF(iemOp_lahf)
6392{
6393 IEMOP_MNEMONIC(lahf, "lahf");
6394 if ( IEM_IS_64BIT_CODE(pVCpu)
6395 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6396 IEMOP_RAISE_INVALID_OPCODE_RET();
6397 IEM_MC_BEGIN(0, 1);
6398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6399 IEM_MC_LOCAL(uint8_t, u8Flags);
6400 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
6401 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
6402 IEM_MC_ADVANCE_RIP_AND_FINISH();
6403 IEM_MC_END();
6404}
6405
6406
6407/**
6408 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
6409 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
6410 * Will return/throw on failures.
6411 * @param a_GCPtrMemOff The variable to store the offset in.
6412 */
6413#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
6414 do \
6415 { \
6416 switch (pVCpu->iem.s.enmEffAddrMode) \
6417 { \
6418 case IEMMODE_16BIT: \
6419 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
6420 break; \
6421 case IEMMODE_32BIT: \
6422 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
6423 break; \
6424 case IEMMODE_64BIT: \
6425 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
6426 break; \
6427 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
6428 } \
6429 } while (0)
6430
6431/**
6432 * @opcode 0xa0
6433 */
6434FNIEMOP_DEF(iemOp_mov_AL_Ob)
6435{
6436 /*
6437 * Get the offset.
6438 */
6439 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
6440 RTGCPTR GCPtrMemOff;
6441 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6442
6443 /*
6444 * Fetch AL.
6445 */
6446 IEM_MC_BEGIN(0,1);
6447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6448 IEM_MC_LOCAL(uint8_t, u8Tmp);
6449 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6450 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
6451 IEM_MC_ADVANCE_RIP_AND_FINISH();
6452 IEM_MC_END();
6453}
6454
6455
6456/**
6457 * @opcode 0xa1
6458 */
6459FNIEMOP_DEF(iemOp_mov_rAX_Ov)
6460{
6461 /*
6462 * Get the offset.
6463 */
6464 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
6465 RTGCPTR GCPtrMemOff;
6466 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6467
6468 /*
6469 * Fetch rAX.
6470 */
6471 switch (pVCpu->iem.s.enmEffOpSize)
6472 {
6473 case IEMMODE_16BIT:
6474 IEM_MC_BEGIN(0,1);
6475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6476 IEM_MC_LOCAL(uint16_t, u16Tmp);
6477 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6478 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
6479 IEM_MC_ADVANCE_RIP_AND_FINISH();
6480 IEM_MC_END();
6481 break;
6482
6483 case IEMMODE_32BIT:
6484 IEM_MC_BEGIN(0,1);
6485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6486 IEM_MC_LOCAL(uint32_t, u32Tmp);
6487 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6488 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
6489 IEM_MC_ADVANCE_RIP_AND_FINISH();
6490 IEM_MC_END();
6491 break;
6492
6493 case IEMMODE_64BIT:
6494 IEM_MC_BEGIN(0,1);
6495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6496 IEM_MC_LOCAL(uint64_t, u64Tmp);
6497 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6498 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
6499 IEM_MC_ADVANCE_RIP_AND_FINISH();
6500 IEM_MC_END();
6501 break;
6502
6503 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6504 }
6505}
6506
6507
6508/**
6509 * @opcode 0xa2
6510 */
6511FNIEMOP_DEF(iemOp_mov_Ob_AL)
6512{
6513 /*
6514 * Get the offset.
6515 */
6516 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
6517 RTGCPTR GCPtrMemOff;
6518 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6519
6520 /*
6521 * Store AL.
6522 */
6523 IEM_MC_BEGIN(0,1);
6524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6525 IEM_MC_LOCAL(uint8_t, u8Tmp);
6526 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
6527 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
6528 IEM_MC_ADVANCE_RIP_AND_FINISH();
6529 IEM_MC_END();
6530}
6531
6532
6533/**
6534 * @opcode 0xa3
6535 */
6536FNIEMOP_DEF(iemOp_mov_Ov_rAX)
6537{
6538 /*
6539 * Get the offset.
6540 */
6541 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
6542 RTGCPTR GCPtrMemOff;
6543 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6544
6545 /*
6546 * Store rAX.
6547 */
6548 switch (pVCpu->iem.s.enmEffOpSize)
6549 {
6550 case IEMMODE_16BIT:
6551 IEM_MC_BEGIN(0,1);
6552 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6553 IEM_MC_LOCAL(uint16_t, u16Tmp);
6554 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
6555 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
6556 IEM_MC_ADVANCE_RIP_AND_FINISH();
6557 IEM_MC_END();
6558 break;
6559
6560 case IEMMODE_32BIT:
6561 IEM_MC_BEGIN(0,1);
6562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6563 IEM_MC_LOCAL(uint32_t, u32Tmp);
6564 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
6565 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
6566 IEM_MC_ADVANCE_RIP_AND_FINISH();
6567 IEM_MC_END();
6568 break;
6569
6570 case IEMMODE_64BIT:
6571 IEM_MC_BEGIN(0,1);
6572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6573 IEM_MC_LOCAL(uint64_t, u64Tmp);
6574 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
6575 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
6576 IEM_MC_ADVANCE_RIP_AND_FINISH();
6577 IEM_MC_END();
6578 break;
6579
6580 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6581 }
6582}
6583
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Single (non-rep) MOVS iteration: reads ValBits bits from [eff-seg:rSI],
 * writes them to [ES:rDI], then advances (or, when EFLAGS.DF is set,
 * retreats) both rSI and rDI by the element size.  AddrBits selects how the
 * index registers are read (zero extended to 64 bits). */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,  uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6603
6604/**
6605 * @opcode 0xa4
6606 */
6607FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
6608{
6609 /*
6610 * Use the C implementation if a repeat prefix is encountered.
6611 */
6612 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6613 {
6614 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
6615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6616 switch (pVCpu->iem.s.enmEffAddrMode)
6617 {
6618 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
6619 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
6620 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
6621 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6622 }
6623 }
6624
6625 /*
6626 * Sharing case implementation with movs[wdq] below.
6627 */
6628 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
6629 switch (pVCpu->iem.s.enmEffAddrMode)
6630 {
6631 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
6632 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
6633 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
6634 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6635 }
6636}
6637
6638
6639/**
6640 * @opcode 0xa5
6641 */
6642FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
6643{
6644
6645 /*
6646 * Use the C implementation if a repeat prefix is encountered.
6647 */
6648 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6649 {
6650 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
6651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6652 switch (pVCpu->iem.s.enmEffOpSize)
6653 {
6654 case IEMMODE_16BIT:
6655 switch (pVCpu->iem.s.enmEffAddrMode)
6656 {
6657 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
6658 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
6659 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
6660 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6661 }
6662 break;
6663 case IEMMODE_32BIT:
6664 switch (pVCpu->iem.s.enmEffAddrMode)
6665 {
6666 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
6667 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
6668 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
6669 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6670 }
6671 case IEMMODE_64BIT:
6672 switch (pVCpu->iem.s.enmEffAddrMode)
6673 {
6674 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
6675 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
6676 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
6677 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6678 }
6679 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6680 }
6681 }
6682
6683 /*
6684 * Annoying double switch here.
6685 * Using ugly macro for implementing the cases, sharing it with movsb.
6686 */
6687 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
6688 switch (pVCpu->iem.s.enmEffOpSize)
6689 {
6690 case IEMMODE_16BIT:
6691 switch (pVCpu->iem.s.enmEffAddrMode)
6692 {
6693 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
6694 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
6695 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
6696 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6697 }
6698 break;
6699
6700 case IEMMODE_32BIT:
6701 switch (pVCpu->iem.s.enmEffAddrMode)
6702 {
6703 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
6704 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
6705 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
6706 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6707 }
6708 break;
6709
6710 case IEMMODE_64BIT:
6711 switch (pVCpu->iem.s.enmEffAddrMode)
6712 {
6713 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6714 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
6715 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
6716 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6717 }
6718 break;
6719 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6720 }
6721}
6722
6723#undef IEM_MOVS_CASE
6724
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Single (non-rep) CMPS iteration: fetches ValBits bits from
 * [eff-seg:rSI] and [ES:rDI], runs the cmp assembly worker to set EFLAGS,
 * then advances (or retreats, per EFLAGS.DF) both index registers by the
 * element size.  AddrBits selects how rSI/rDI are read (zero extended). */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,  uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6752
6753/**
6754 * @opcode 0xa6
6755 */
6756FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
6757{
6758
6759 /*
6760 * Use the C implementation if a repeat prefix is encountered.
6761 */
6762 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6763 {
6764 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
6765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6766 switch (pVCpu->iem.s.enmEffAddrMode)
6767 {
6768 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6769 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6770 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6771 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6772 }
6773 }
6774 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6775 {
6776 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
6777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6778 switch (pVCpu->iem.s.enmEffAddrMode)
6779 {
6780 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6781 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6782 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6783 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6784 }
6785 }
6786
6787 /*
6788 * Sharing case implementation with cmps[wdq] below.
6789 */
6790 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
6791 switch (pVCpu->iem.s.enmEffAddrMode)
6792 {
6793 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
6794 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
6795 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
6796 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6797 }
6798}
6799
6800
6801/**
6802 * @opcode 0xa7
6803 */
6804FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
6805{
6806 /*
6807 * Use the C implementation if a repeat prefix is encountered.
6808 */
6809 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6810 {
6811 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
6812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6813 switch (pVCpu->iem.s.enmEffOpSize)
6814 {
6815 case IEMMODE_16BIT:
6816 switch (pVCpu->iem.s.enmEffAddrMode)
6817 {
6818 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6819 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6820 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6821 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6822 }
6823 break;
6824 case IEMMODE_32BIT:
6825 switch (pVCpu->iem.s.enmEffAddrMode)
6826 {
6827 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6828 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6829 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6830 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6831 }
6832 case IEMMODE_64BIT:
6833 switch (pVCpu->iem.s.enmEffAddrMode)
6834 {
6835 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
6836 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6837 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6838 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6839 }
6840 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6841 }
6842 }
6843
6844 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6845 {
6846 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
6847 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6848 switch (pVCpu->iem.s.enmEffOpSize)
6849 {
6850 case IEMMODE_16BIT:
6851 switch (pVCpu->iem.s.enmEffAddrMode)
6852 {
6853 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6854 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6855 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6856 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6857 }
6858 break;
6859 case IEMMODE_32BIT:
6860 switch (pVCpu->iem.s.enmEffAddrMode)
6861 {
6862 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6863 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6864 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6865 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6866 }
6867 case IEMMODE_64BIT:
6868 switch (pVCpu->iem.s.enmEffAddrMode)
6869 {
6870 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
6871 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6872 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6873 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6874 }
6875 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6876 }
6877 }
6878
6879 /*
6880 * Annoying double switch here.
6881 * Using ugly macro for implementing the cases, sharing it with cmpsb.
6882 */
6883 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
6884 switch (pVCpu->iem.s.enmEffOpSize)
6885 {
6886 case IEMMODE_16BIT:
6887 switch (pVCpu->iem.s.enmEffAddrMode)
6888 {
6889 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
6890 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
6891 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
6892 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6893 }
6894 break;
6895
6896 case IEMMODE_32BIT:
6897 switch (pVCpu->iem.s.enmEffAddrMode)
6898 {
6899 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
6900 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
6901 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
6902 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6903 }
6904 break;
6905
6906 case IEMMODE_64BIT:
6907 switch (pVCpu->iem.s.enmEffAddrMode)
6908 {
6909 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6910 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
6911 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
6912 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6913 }
6914 break;
6915 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6916 }
6917}
6918
6919#undef IEM_CMPS_CASE
6920
6921/**
6922 * @opcode 0xa8
6923 */
6924FNIEMOP_DEF(iemOp_test_AL_Ib)
6925{
6926 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
6927 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6928 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
6929}
6930
6931
6932/**
6933 * @opcode 0xa9
6934 */
6935FNIEMOP_DEF(iemOp_test_eAX_Iz)
6936{
6937 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
6938 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6939 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
6940}
6941
6942
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Single (non-rep) STOS iteration: stores the low ValBits bits of rAX to
 * [ES:rDI], then advances (or retreats, per EFLAGS.DF) rDI by the element
 * size.  AddrBits selects how rDI is read (zero extended to 64 bits). */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,  uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr,  X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6959
6960/**
6961 * @opcode 0xaa
6962 */
6963FNIEMOP_DEF(iemOp_stosb_Yb_AL)
6964{
6965 /*
6966 * Use the C implementation if a repeat prefix is encountered.
6967 */
6968 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6969 {
6970 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
6971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6972 switch (pVCpu->iem.s.enmEffAddrMode)
6973 {
6974 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m16);
6975 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m32);
6976 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m64);
6977 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6978 }
6979 }
6980
6981 /*
6982 * Sharing case implementation with stos[wdq] below.
6983 */
6984 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
6985 switch (pVCpu->iem.s.enmEffAddrMode)
6986 {
6987 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
6988 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
6989 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
6990 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6991 }
6992}
6993
6994
6995/**
6996 * @opcode 0xab
6997 */
6998FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
6999{
7000 /*
7001 * Use the C implementation if a repeat prefix is encountered.
7002 */
7003 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7004 {
7005 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
7006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7007 switch (pVCpu->iem.s.enmEffOpSize)
7008 {
7009 case IEMMODE_16BIT:
7010 switch (pVCpu->iem.s.enmEffAddrMode)
7011 {
7012 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m16);
7013 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m32);
7014 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m64);
7015 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7016 }
7017 break;
7018 case IEMMODE_32BIT:
7019 switch (pVCpu->iem.s.enmEffAddrMode)
7020 {
7021 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m16);
7022 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m32);
7023 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m64);
7024 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7025 }
7026 case IEMMODE_64BIT:
7027 switch (pVCpu->iem.s.enmEffAddrMode)
7028 {
7029 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7030 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m32);
7031 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m64);
7032 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7033 }
7034 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7035 }
7036 }
7037
7038 /*
7039 * Annoying double switch here.
7040 * Using ugly macro for implementing the cases, sharing it with stosb.
7041 */
7042 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7043 switch (pVCpu->iem.s.enmEffOpSize)
7044 {
7045 case IEMMODE_16BIT:
7046 switch (pVCpu->iem.s.enmEffAddrMode)
7047 {
7048 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
7049 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
7050 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
7051 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7052 }
7053 break;
7054
7055 case IEMMODE_32BIT:
7056 switch (pVCpu->iem.s.enmEffAddrMode)
7057 {
7058 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
7059 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
7060 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
7061 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7062 }
7063 break;
7064
7065 case IEMMODE_64BIT:
7066 switch (pVCpu->iem.s.enmEffAddrMode)
7067 {
7068 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7069 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
7070 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
7071 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7072 }
7073 break;
7074 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7075 }
7076}
7077
7078#undef IEM_STOS_CASE
7079
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Fetches a ValBits-wide value from iEffSeg:xSI into the corresponding part
 * of xAX, then steps xSI by ValBits/8 - down when EFLAGS.DF is set, up when
 * it is clear.  AddrBits selects how much of xSI is used for the address. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
7096
/**
 * @opcode 0xac
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (REP and REPNE behave identically for LODS - the prefix only repeats.)
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing the IEM_LODS_CASE implementation with lodswd below.
     * (The old comment said stos[wdq] - a copy&paste leftover.)
     */
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7130
7131
/**
 * @opcode 0xad
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* Note: no 'break' here - harmless as every case above returns, but inconsistent with the 16-bit case. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7); /* 16-bit addressing cannot be encoded in 64-bit mode. */
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7214
7215#undef IEM_LODS_CASE
7216
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Compares (iemAImpl_cmp) the ValBits-wide value in xAX against the value at
 * ES:xDI, updating EFLAGS, then steps xDI by ValBits/8 according to
 * EFLAGS.DF.  SCAS always uses ES - no segment override applies. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END();
7239
/**
 * @opcode 0xae
 */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     * Unlike LODS/STOS, REPE and REPNE differ here (terminate on ZF).
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing the IEM_SCAS_CASE implementation with scaswd below.
     * (The old comment said stos[wdq] - a copy&paste leftover.)
     */
    IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7285
7286
/**
 * @opcode 0xaf
 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* Note: no 'break' here - harmless as every case above returns. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* Note: no 'break' here - harmless as every case above returns. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5); /* 16-bit addressing cannot be encoded in 64-bit mode. */
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7403
7404#undef IEM_SCAS_CASE
7405
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the byte immediate and stores it into the 8-bit general register
 * selected by @a iFixedReg (index already REX.B adjusted by the caller).
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iFixedReg, u8Value);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7419
7420
/**
 * @opcode 0xb0
 */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    /* REX.B adds 8 to the register index, selecting R8B instead of AL. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
7429
7430
/**
 * @opcode 0xb1
 */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
    /* REX.B selects R9B instead of CL. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
7439
7440
/**
 * @opcode 0xb2
 */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    /* REX.B selects R10B instead of DL. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
7449
7450
/**
 * @opcode 0xb3
 */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    /* REX.B selects R11B instead of BL. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
7459
7460
/**
 * @opcode 0xb4
 */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    /* Register index 4: AH without a REX prefix, SPL/R12B with one.
       NOTE(review): the AH-vs-SPL distinction is presumably resolved inside
       the U8 GREG accessors based on the REX prefix - confirm. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
7469
7470
/**
 * @opcode 0xb5
 */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    /* Register index 5: CH without a REX prefix, BPL/R13B with one. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
7479
7480
/**
 * @opcode 0xb6
 */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    /* Register index 6: DH without a REX prefix, SIL/R14B with one. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
7489
7490
/**
 * @opcode 0xb7
 */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    /* Register index 7: BH without a REX prefix, DIL/R15B with one. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
7499
7500
/**
 * Common 'mov regX,immX' helper.
 *
 * Fetches an operand-size immediate and stores it into the general purpose
 * register selected by @a iFixedReg (index already REX.B adjusted by the
 * caller).  The 64-bit form takes a full 8-byte immediate.
 *
 * NOTE(review): the 32-bit store presumably zero-extends to 64-bit per the
 * usual x86-64 rules inside IEM_MC_STORE_GREG_U32 - confirm.
 */
FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
            IEM_MC_STORE_GREG_U16(iFixedReg, u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
            IEM_MC_STORE_GREG_U32(iFixedReg, u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }
        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
            IEM_MC_STORE_GREG_U64(iFixedReg, u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7545
7546
/**
 * @opcode 0xb8
 */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
    /* REX.B adds 8 to the register index, selecting R8 instead of rAX. */
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
7555
7556
/**
 * @opcode 0xb9
 */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
    /* REX.B selects R9 instead of rCX. */
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
7565
7566
/**
 * @opcode 0xba
 */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
    /* REX.B selects R10 instead of rDX. */
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
7575
7576
/**
 * @opcode 0xbb
 */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
    /* REX.B selects R11 instead of rBX. */
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
7585
7586
/**
 * @opcode 0xbc
 */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
    /* REX.B selects R12 instead of rSP. */
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
7595
7596
/**
 * @opcode 0xbd
 */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
    /* REX.B selects R13 instead of rBP. */
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
7605
7606
/**
 * @opcode 0xbe
 */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
    /* REX.B selects R14 instead of rSI. */
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
7615
7616
/**
 * @opcode 0xbf
 */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
    /* REX.B selects R15 instead of rDI. */
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
7625
7626
/**
 * @opcode 0xc0
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Group 2: the ModR/M reg field selects the shift/rotate op; /6 is unassigned and raises #UD. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF/AF can be left undefined by these operations - inform the verifier. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory - uses the new mapped-access API (bUnmapInfo). */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7688
7689
7690/**
7691 * @opcode 0xc1
7692 */
7693FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
7694{
7695 IEMOP_HLP_MIN_186();
7696 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7697 PCIEMOPSHIFTSIZES pImpl;
7698 switch (IEM_GET_MODRM_REG_8(bRm))
7699 {
7700 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
7701 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
7702 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
7703 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
7704 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
7705 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
7706 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
7707 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
7708 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
7709 }
7710 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7711
7712 if (IEM_IS_MODRM_REG_MODE(bRm))
7713 {
7714 /* register */
7715 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7716 switch (pVCpu->iem.s.enmEffOpSize)
7717 {
7718 case IEMMODE_16BIT:
7719 IEM_MC_BEGIN(3, 0);
7720 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7721 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7722 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7723 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7724 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7725 IEM_MC_REF_EFLAGS(pEFlags);
7726 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7727 IEM_MC_ADVANCE_RIP_AND_FINISH();
7728 IEM_MC_END();
7729 break;
7730
7731 case IEMMODE_32BIT:
7732 IEM_MC_BEGIN(3, 0);
7733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7734 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7735 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7736 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7737 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7738 IEM_MC_REF_EFLAGS(pEFlags);
7739 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7740 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7741 IEM_MC_ADVANCE_RIP_AND_FINISH();
7742 IEM_MC_END();
7743 break;
7744
7745 case IEMMODE_64BIT:
7746 IEM_MC_BEGIN(3, 0);
7747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7748 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7749 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7750 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7751 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7752 IEM_MC_REF_EFLAGS(pEFlags);
7753 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7754 IEM_MC_ADVANCE_RIP_AND_FINISH();
7755 IEM_MC_END();
7756 break;
7757
7758 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7759 }
7760 }
7761 else
7762 {
7763 /* memory */
7764 switch (pVCpu->iem.s.enmEffOpSize)
7765 {
7766 case IEMMODE_16BIT:
7767 IEM_MC_BEGIN(3, 2);
7768 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7769 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7770 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7771 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7772
7773 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7774 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7775 IEM_MC_ASSIGN(cShiftArg, cShift);
7776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7777 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7778 IEM_MC_FETCH_EFLAGS(EFlags);
7779 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7780
7781 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7782 IEM_MC_COMMIT_EFLAGS(EFlags);
7783 IEM_MC_ADVANCE_RIP_AND_FINISH();
7784 IEM_MC_END();
7785 break;
7786
7787 case IEMMODE_32BIT:
7788 IEM_MC_BEGIN(3, 2);
7789 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7790 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7791 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7792 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7793
7794 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7795 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7796 IEM_MC_ASSIGN(cShiftArg, cShift);
7797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7798 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7799 IEM_MC_FETCH_EFLAGS(EFlags);
7800 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7801
7802 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7803 IEM_MC_COMMIT_EFLAGS(EFlags);
7804 IEM_MC_ADVANCE_RIP_AND_FINISH();
7805 IEM_MC_END();
7806 break;
7807
7808 case IEMMODE_64BIT:
7809 IEM_MC_BEGIN(3, 2);
7810 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7811 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7812 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7813 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7814
7815 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7816 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7817 IEM_MC_ASSIGN(cShiftArg, cShift);
7818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7819 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7820 IEM_MC_FETCH_EFLAGS(EFlags);
7821 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7822
7823 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7824 IEM_MC_COMMIT_EFLAGS(EFlags);
7825 IEM_MC_ADVANCE_RIP_AND_FINISH();
7826 IEM_MC_END();
7827 break;
7828
7829 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7830 }
7831 }
7832}
7833
7834
/**
 * @opcode 0xc2
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    /* Iw: number of bytes to release from the stack after popping the return address. */
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    /* In 64-bit mode the operand size defaults to 64-bit and (on Intel) the 66h prefix is ignored - see the helper macro. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_16, u16Imm);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_32, u16Imm);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_64, u16Imm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7855
7856
/**
 * @opcode 0xc3
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    /* In 64-bit mode the operand size defaults to 64-bit and (on Intel) the 66h prefix is ignored - see the helper macro. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_16);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_32);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_64);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7876
7877
/**
 * @opcode 0xc4
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LES instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.  (The old comment said
       LDS here - the 0xc4/0xc5 comments were swapped.)
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm) )
    {
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* VEX.W is only honoured in 64-bit code. */
            if ((bVex2 & 0x80 /* VEX.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* The R/X/B/vvvv fields are stored inverted in the encoding. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;

            /* The low 5 bits of the first payload byte select the opcode map. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        Log(("VEX3: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
7947
7948
/**
 * @opcode 0xc5
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.  (The old comment said
       LES here - the 0xc4/0xc5 comments were swapped.)
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* R and vvvv are stored inverted; the 2-byte form only reaches opcode map 1 (0x0f). */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bRm & 0x3;

#ifdef IEM_WITH_VEX
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
7992
7993
7994/**
7995 * @opcode 0xc6
7996 */
7997FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
7998{
7999 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8000 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
8001 IEMOP_RAISE_INVALID_OPCODE_RET();
8002 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
8003
8004 if (IEM_IS_MODRM_REG_MODE(bRm))
8005 {
8006 /* register access */
8007 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8008 IEM_MC_BEGIN(0, 0);
8009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8010 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
8011 IEM_MC_ADVANCE_RIP_AND_FINISH();
8012 IEM_MC_END();
8013 }
8014 else
8015 {
8016 /* memory access. */
8017 IEM_MC_BEGIN(0, 1);
8018 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8019 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8020 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8022 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
8023 IEM_MC_ADVANCE_RIP_AND_FINISH();
8024 IEM_MC_END();
8025 }
8026}
8027
8028
8029/**
8030 * @opcode 0xc7
8031 */
8032FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
8033{
8034 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8035 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
8036 IEMOP_RAISE_INVALID_OPCODE_RET();
8037 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
8038
8039 if (IEM_IS_MODRM_REG_MODE(bRm))
8040 {
8041 /* register access */
8042 switch (pVCpu->iem.s.enmEffOpSize)
8043 {
8044 case IEMMODE_16BIT:
8045 IEM_MC_BEGIN(0, 0);
8046 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8048 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
8049 IEM_MC_ADVANCE_RIP_AND_FINISH();
8050 IEM_MC_END();
8051 break;
8052
8053 case IEMMODE_32BIT:
8054 IEM_MC_BEGIN(0, 0);
8055 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8057 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
8058 IEM_MC_ADVANCE_RIP_AND_FINISH();
8059 IEM_MC_END();
8060 break;
8061
8062 case IEMMODE_64BIT:
8063 IEM_MC_BEGIN(0, 0);
8064 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8066 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
8067 IEM_MC_ADVANCE_RIP_AND_FINISH();
8068 IEM_MC_END();
8069 break;
8070
8071 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8072 }
8073 }
8074 else
8075 {
8076 /* memory access. */
8077 switch (pVCpu->iem.s.enmEffOpSize)
8078 {
8079 case IEMMODE_16BIT:
8080 IEM_MC_BEGIN(0, 1);
8081 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8082 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
8083 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8085 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
8086 IEM_MC_ADVANCE_RIP_AND_FINISH();
8087 IEM_MC_END();
8088 break;
8089
8090 case IEMMODE_32BIT:
8091 IEM_MC_BEGIN(0, 1);
8092 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8093 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8094 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8096 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
8097 IEM_MC_ADVANCE_RIP_AND_FINISH();
8098 IEM_MC_END();
8099 break;
8100
8101 case IEMMODE_64BIT:
8102 IEM_MC_BEGIN(0, 1);
8103 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8104 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8105 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8107 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
8108 IEM_MC_ADVANCE_RIP_AND_FINISH();
8109 IEM_MC_END();
8110 break;
8111
8112 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8113 }
8114 }
8115}
8116
8117
8118
8119
8120/**
8121 * @opcode 0xc8
8122 */
8123FNIEMOP_DEF(iemOp_enter_Iw_Ib)
8124{
8125 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
8126 IEMOP_HLP_MIN_186();
8127 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8128 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
8129 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
8130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8131 IEM_MC_DEFER_TO_CIMPL_3_RET(0, iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
8132}
8133
8134
8135/**
8136 * @opcode 0xc9
8137 */
8138FNIEMOP_DEF(iemOp_leave)
8139{
8140 IEMOP_MNEMONIC(leave, "leave");
8141 IEMOP_HLP_MIN_186();
8142 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8144 IEM_MC_DEFER_TO_CIMPL_1_RET(0, iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
8145}
8146
8147
8148/**
8149 * @opcode 0xca
8150 */
8151FNIEMOP_DEF(iemOp_retf_Iw)
8152{
8153 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
8154 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8156 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE,
8157 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
8158}
8159
8160
8161/**
8162 * @opcode 0xcb
8163 */
8164FNIEMOP_DEF(iemOp_retf)
8165{
8166 IEMOP_MNEMONIC(retf, "retf");
8167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8168 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE,
8169 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
8170}
8171
8172
8173/**
8174 * @opcode 0xcc
8175 */
8176FNIEMOP_DEF(iemOp_int3)
8177{
8178 IEMOP_MNEMONIC(int3, "int3");
8179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8180 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
8181 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
8182 iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
8183}
8184
8185
8186/**
8187 * @opcode 0xcd
8188 */
8189FNIEMOP_DEF(iemOp_int_Ib)
8190{
8191 IEMOP_MNEMONIC(int_Ib, "int Ib");
8192 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
8193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8194 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
8195 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
8196 iemCImpl_int, u8Int, IEMINT_INTN);
8197}
8198
8199
8200/**
8201 * @opcode 0xce
8202 */
8203FNIEMOP_DEF(iemOp_into)
8204{
8205 IEMOP_MNEMONIC(into, "into");
8206 IEMOP_HLP_NO_64BIT();
8207 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_CONDITIONAL
8208 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
8209 iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
8210}
8211
8212
8213/**
8214 * @opcode 0xcf
8215 */
8216FNIEMOP_DEF(iemOp_iret)
8217{
8218 IEMOP_MNEMONIC(iret, "iret");
8219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8220 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
8221 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
8222 iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
8223}
8224
8225
8226/**
8227 * @opcode 0xd0
8228 */
8229FNIEMOP_DEF(iemOp_Grp2_Eb_1)
8230{
8231 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8232 PCIEMOPSHIFTSIZES pImpl;
8233 switch (IEM_GET_MODRM_REG_8(bRm))
8234 {
8235 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
8236 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
8237 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
8238 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
8239 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
8240 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
8241 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
8242 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8243 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
8244 }
8245 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8246
8247 if (IEM_IS_MODRM_REG_MODE(bRm))
8248 {
8249 /* register */
8250 IEM_MC_BEGIN(3, 0);
8251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8252 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8253 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
8254 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8255 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8256 IEM_MC_REF_EFLAGS(pEFlags);
8257 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8258 IEM_MC_ADVANCE_RIP_AND_FINISH();
8259 IEM_MC_END();
8260 }
8261 else
8262 {
8263 /* memory */
8264 IEM_MC_BEGIN(3, 3);
8265 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8266 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
8267 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8268 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8269 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8270
8271 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8273 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8274 IEM_MC_FETCH_EFLAGS(EFlags);
8275 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8276
8277 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
8278 IEM_MC_COMMIT_EFLAGS(EFlags);
8279 IEM_MC_ADVANCE_RIP_AND_FINISH();
8280 IEM_MC_END();
8281 }
8282}
8283
8284
8285
8286/**
8287 * @opcode 0xd1
8288 */
8289FNIEMOP_DEF(iemOp_Grp2_Ev_1)
8290{
8291 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8292 PCIEMOPSHIFTSIZES pImpl;
8293 switch (IEM_GET_MODRM_REG_8(bRm))
8294 {
8295 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
8296 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
8297 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
8298 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
8299 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
8300 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
8301 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
8302 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8303 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
8304 }
8305 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8306
8307 if (IEM_IS_MODRM_REG_MODE(bRm))
8308 {
8309 /* register */
8310 switch (pVCpu->iem.s.enmEffOpSize)
8311 {
8312 case IEMMODE_16BIT:
8313 IEM_MC_BEGIN(3, 0);
8314 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8315 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8316 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8317 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8318 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8319 IEM_MC_REF_EFLAGS(pEFlags);
8320 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8321 IEM_MC_ADVANCE_RIP_AND_FINISH();
8322 IEM_MC_END();
8323 break;
8324
8325 case IEMMODE_32BIT:
8326 IEM_MC_BEGIN(3, 0);
8327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8328 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8329 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8330 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8331 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8332 IEM_MC_REF_EFLAGS(pEFlags);
8333 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8334 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8335 IEM_MC_ADVANCE_RIP_AND_FINISH();
8336 IEM_MC_END();
8337 break;
8338
8339 case IEMMODE_64BIT:
8340 IEM_MC_BEGIN(3, 0);
8341 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8342 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8343 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8344 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8345 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8346 IEM_MC_REF_EFLAGS(pEFlags);
8347 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8348 IEM_MC_ADVANCE_RIP_AND_FINISH();
8349 IEM_MC_END();
8350 break;
8351
8352 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8353 }
8354 }
8355 else
8356 {
8357 /* memory */
8358 switch (pVCpu->iem.s.enmEffOpSize)
8359 {
8360 case IEMMODE_16BIT:
8361 IEM_MC_BEGIN(3, 2);
8362 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8363 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8364 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8365 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8366
8367 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8368 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8369 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8370 IEM_MC_FETCH_EFLAGS(EFlags);
8371 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8372
8373 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8374 IEM_MC_COMMIT_EFLAGS(EFlags);
8375 IEM_MC_ADVANCE_RIP_AND_FINISH();
8376 IEM_MC_END();
8377 break;
8378
8379 case IEMMODE_32BIT:
8380 IEM_MC_BEGIN(3, 2);
8381 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8382 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8383 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8384 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8385
8386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8388 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8389 IEM_MC_FETCH_EFLAGS(EFlags);
8390 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8391
8392 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8393 IEM_MC_COMMIT_EFLAGS(EFlags);
8394 IEM_MC_ADVANCE_RIP_AND_FINISH();
8395 IEM_MC_END();
8396 break;
8397
8398 case IEMMODE_64BIT:
8399 IEM_MC_BEGIN(3, 2);
8400 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8401 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8402 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8403 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8404
8405 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8407 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8408 IEM_MC_FETCH_EFLAGS(EFlags);
8409 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8410
8411 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8412 IEM_MC_COMMIT_EFLAGS(EFlags);
8413 IEM_MC_ADVANCE_RIP_AND_FINISH();
8414 IEM_MC_END();
8415 break;
8416
8417 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8418 }
8419 }
8420}
8421
8422
8423/**
8424 * @opcode 0xd2
8425 */
8426FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
8427{
8428 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8429 PCIEMOPSHIFTSIZES pImpl;
8430 switch (IEM_GET_MODRM_REG_8(bRm))
8431 {
8432 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
8433 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
8434 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
8435 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
8436 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
8437 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
8438 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
8439 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8440 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
8441 }
8442 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8443
8444 if (IEM_IS_MODRM_REG_MODE(bRm))
8445 {
8446 /* register */
8447 IEM_MC_BEGIN(3, 0);
8448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8449 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8450 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8451 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8452 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8453 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8454 IEM_MC_REF_EFLAGS(pEFlags);
8455 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8456 IEM_MC_ADVANCE_RIP_AND_FINISH();
8457 IEM_MC_END();
8458 }
8459 else
8460 {
8461 /* memory */
8462 IEM_MC_BEGIN(3, 3);
8463 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8464 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8465 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8466 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8467 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8468
8469 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8471 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8472 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8473 IEM_MC_FETCH_EFLAGS(EFlags);
8474 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8475
8476 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
8477 IEM_MC_COMMIT_EFLAGS(EFlags);
8478 IEM_MC_ADVANCE_RIP_AND_FINISH();
8479 IEM_MC_END();
8480 }
8481}
8482
8483
8484/**
8485 * @opcode 0xd3
8486 */
8487FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
8488{
8489 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8490 PCIEMOPSHIFTSIZES pImpl;
8491 switch (IEM_GET_MODRM_REG_8(bRm))
8492 {
8493 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
8494 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
8495 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
8496 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
8497 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
8498 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
8499 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
8500 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8501 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8502 }
8503 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8504
8505 if (IEM_IS_MODRM_REG_MODE(bRm))
8506 {
8507 /* register */
8508 switch (pVCpu->iem.s.enmEffOpSize)
8509 {
8510 case IEMMODE_16BIT:
8511 IEM_MC_BEGIN(3, 0);
8512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8513 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8514 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8515 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8516 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8517 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8518 IEM_MC_REF_EFLAGS(pEFlags);
8519 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8520 IEM_MC_ADVANCE_RIP_AND_FINISH();
8521 IEM_MC_END();
8522 break;
8523
8524 case IEMMODE_32BIT:
8525 IEM_MC_BEGIN(3, 0);
8526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8527 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8528 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8529 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8530 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8531 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8532 IEM_MC_REF_EFLAGS(pEFlags);
8533 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8534 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8535 IEM_MC_ADVANCE_RIP_AND_FINISH();
8536 IEM_MC_END();
8537 break;
8538
8539 case IEMMODE_64BIT:
8540 IEM_MC_BEGIN(3, 0);
8541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8542 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8543 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8544 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8545 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8546 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8547 IEM_MC_REF_EFLAGS(pEFlags);
8548 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8549 IEM_MC_ADVANCE_RIP_AND_FINISH();
8550 IEM_MC_END();
8551 break;
8552
8553 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8554 }
8555 }
8556 else
8557 {
8558 /* memory */
8559 switch (pVCpu->iem.s.enmEffOpSize)
8560 {
8561 case IEMMODE_16BIT:
8562 IEM_MC_BEGIN(3, 2);
8563 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8564 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8565 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8566 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8567
8568 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8570 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8571 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8572 IEM_MC_FETCH_EFLAGS(EFlags);
8573 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8574
8575 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8576 IEM_MC_COMMIT_EFLAGS(EFlags);
8577 IEM_MC_ADVANCE_RIP_AND_FINISH();
8578 IEM_MC_END();
8579 break;
8580
8581 case IEMMODE_32BIT:
8582 IEM_MC_BEGIN(3, 2);
8583 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8584 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8585 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8586 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8587
8588 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8590 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8591 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8592 IEM_MC_FETCH_EFLAGS(EFlags);
8593 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8594
8595 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8596 IEM_MC_COMMIT_EFLAGS(EFlags);
8597 IEM_MC_ADVANCE_RIP_AND_FINISH();
8598 IEM_MC_END();
8599 break;
8600
8601 case IEMMODE_64BIT:
8602 IEM_MC_BEGIN(3, 2);
8603 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8604 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8605 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8606 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8607
8608 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8610 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8611 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8612 IEM_MC_FETCH_EFLAGS(EFlags);
8613 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8614
8615 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8616 IEM_MC_COMMIT_EFLAGS(EFlags);
8617 IEM_MC_ADVANCE_RIP_AND_FINISH();
8618 IEM_MC_END();
8619 break;
8620
8621 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8622 }
8623 }
8624}
8625
8626/**
8627 * @opcode 0xd4
8628 */
8629FNIEMOP_DEF(iemOp_aam_Ib)
8630{
8631 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
8632 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
8633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8634 IEMOP_HLP_NO_64BIT();
8635 if (!bImm)
8636 IEMOP_RAISE_DIVIDE_ERROR_RET();
8637 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aam, bImm);
8638}
8639
8640
8641/**
8642 * @opcode 0xd5
8643 */
8644FNIEMOP_DEF(iemOp_aad_Ib)
8645{
8646 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
8647 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
8648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8649 IEMOP_HLP_NO_64BIT();
8650 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aad, bImm);
8651}
8652
8653
8654/**
8655 * @opcode 0xd6
8656 */
8657FNIEMOP_DEF(iemOp_salc)
8658{
8659 IEMOP_MNEMONIC(salc, "salc");
8660 IEMOP_HLP_NO_64BIT();
8661
8662 IEM_MC_BEGIN(0, 0);
8663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8664 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8665 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
8666 } IEM_MC_ELSE() {
8667 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
8668 } IEM_MC_ENDIF();
8669 IEM_MC_ADVANCE_RIP_AND_FINISH();
8670 IEM_MC_END();
8671}
8672
8673
8674/**
8675 * @opcode 0xd7
8676 */
8677FNIEMOP_DEF(iemOp_xlat)
8678{
8679 IEMOP_MNEMONIC(xlat, "xlat");
8680 switch (pVCpu->iem.s.enmEffAddrMode)
8681 {
8682 case IEMMODE_16BIT:
8683 IEM_MC_BEGIN(2, 0);
8684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8685 IEM_MC_LOCAL(uint8_t, u8Tmp);
8686 IEM_MC_LOCAL(uint16_t, u16Addr);
8687 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
8688 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
8689 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
8690 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
8691 IEM_MC_ADVANCE_RIP_AND_FINISH();
8692 IEM_MC_END();
8693 break;
8694
8695 case IEMMODE_32BIT:
8696 IEM_MC_BEGIN(2, 0);
8697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8698 IEM_MC_LOCAL(uint8_t, u8Tmp);
8699 IEM_MC_LOCAL(uint32_t, u32Addr);
8700 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
8701 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
8702 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
8703 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
8704 IEM_MC_ADVANCE_RIP_AND_FINISH();
8705 IEM_MC_END();
8706 break;
8707
8708 case IEMMODE_64BIT:
8709 IEM_MC_BEGIN(2, 0);
8710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8711 IEM_MC_LOCAL(uint8_t, u8Tmp);
8712 IEM_MC_LOCAL(uint64_t, u64Addr);
8713 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
8714 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
8715 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
8716 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
8717 IEM_MC_ADVANCE_RIP_AND_FINISH();
8718 IEM_MC_END();
8719 break;
8720
8721 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8722 }
8723}
8724
8725
8726/**
8727 * Common worker for FPU instructions working on ST0 and STn, and storing the
8728 * result in ST0.
8729 *
8730 * @param bRm Mod R/M byte.
8731 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8732 */
8733FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
8734{
8735 IEM_MC_BEGIN(3, 1);
8736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8737 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8738 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8739 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8740 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8741
8742 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8743 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8744 IEM_MC_PREPARE_FPU_USAGE();
8745 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8746 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
8747 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
8748 } IEM_MC_ELSE() {
8749 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
8750 } IEM_MC_ENDIF();
8751 IEM_MC_ADVANCE_RIP_AND_FINISH();
8752
8753 IEM_MC_END();
8754}
8755
8756
8757/**
8758 * Common worker for FPU instructions working on ST0 and STn, and only affecting
8759 * flags.
8760 *
8761 * @param bRm Mod R/M byte.
8762 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8763 */
8764FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8765{
8766 IEM_MC_BEGIN(3, 1);
8767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8768 IEM_MC_LOCAL(uint16_t, u16Fsw);
8769 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8770 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8771 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8772
8773 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8774 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8775 IEM_MC_PREPARE_FPU_USAGE();
8776 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8777 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8778 IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
8779 } IEM_MC_ELSE() {
8780 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
8781 } IEM_MC_ENDIF();
8782 IEM_MC_ADVANCE_RIP_AND_FINISH();
8783
8784 IEM_MC_END();
8785}
8786
8787
8788/**
8789 * Common worker for FPU instructions working on ST0 and STn, only affecting
8790 * flags, and popping when done.
8791 *
8792 * @param bRm Mod R/M byte.
8793 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8794 */
8795FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8796{
8797 IEM_MC_BEGIN(3, 1);
8798 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8799 IEM_MC_LOCAL(uint16_t, u16Fsw);
8800 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8801 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8802 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8803
8804 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8805 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8806 IEM_MC_PREPARE_FPU_USAGE();
8807 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8808 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8809 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
8810 } IEM_MC_ELSE() {
8811 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
8812 } IEM_MC_ENDIF();
8813 IEM_MC_ADVANCE_RIP_AND_FINISH();
8814
8815 IEM_MC_END();
8816}
8817
8818
/** Opcode 0xd8 11/0.  FADD: ST(0) := ST(0) + ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
8825
8826
/** Opcode 0xd8 11/1.  FMUL: ST(0) := ST(0) * ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
8833
8834
/** Opcode 0xd8 11/2.  FCOM: compare ST(0) with ST(i), setting FSW only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
8841
8842
/** Opcode 0xd8 11/3.  FCOMP: like FCOM but pops the stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
8849
8850
/** Opcode 0xd8 11/4.  FSUB: ST(0) := ST(0) - ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
8857
8858
/** Opcode 0xd8 11/5.  FSUBR: reversed operands, ST(0) := ST(i) - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
8865
8866
/** Opcode 0xd8 11/6.  FDIV: ST(0) := ST(0) / ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
8873
8874
/** Opcode 0xd8 11/7.  FDIVR: reversed operands, ST(0) := ST(i) / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
8881
8882
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * Sequence: decode effective address, raise \#NM/\#MF as needed, fetch the
 * 32-bit real operand, then run the assembly helper if ST0 is not empty,
 * otherwise record a stack underflow.
 *
 * @param bRm Mod R/M byte.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* The memory operand is fetched before checking the register stack. */
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8918
8919
/** Opcode 0xd8 !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    /* ST0 += m32real via the common ST0/m32r worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
8934
8935
/** Opcode 0xd8 !11/2.
 * Compares ST0 with an m32real operand, updating only FSW (no stack store).
 * On empty ST0 a stack underflow with the memory operand info is recorded. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* Comparison result lives in the returned FSW; also records FPUDP/FPUDS. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8967
8968
/** Opcode 0xd8 !11/3.
 * Same as iemOp_fcom_m32r, but pops the register stack after the compare
 * (hence the _THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9000
9001
/** Opcode 0xd8 !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    /* ST0 -= m32real via the common ST0/m32r worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
9032
9033
/**
 * @opcode 0xd8
 *
 * First x87 escape opcode. Dispatches on the ModR/M reg field; the register
 * form (mod=11) operates on ST0/ST(i), the memory form on ST0/m32real.
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 opcode bits + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9073
9074
/** Opcode 0xd9 /0 mem32real
 * Pushes an m32real onto the FPU stack (converted to 80-bit).
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* A push lands in ST7 (relative); it must be empty or we overflow the stack. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9105
9106
/** Opcode 0xd9 !11/2 mem32real
 * Stores ST0 to an m32real. If ST0 is empty and IM is masked, a negative
 * QNaN is written instead (per x87 masked-underflow store semantics).
 * @todo Still uses the legacy IEM_MC_MEM_MAP; see the IEM_MC_MEM_MAP_XXX
 *       conversion noted in r100834 — not converted here. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Destination is mapped before touching the FPU state so a #PF is raised first. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on the FSW result (unmasked exceptions suppress the store). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            /* Invalid-op masked: store the indefinite (negative QNaN) value. */
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9140
9141
/** Opcode 0xd9 !11/3
 * Like iemOp_fst_m32r but pops the register stack afterwards
 * (_THEN_POP FSW/underflow variants).
 * @todo Still uses the legacy IEM_MC_MEM_MAP; see the IEM_MC_MEM_MAP_XXX
 *       conversion noted in r100834 — not converted here. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9175
9176
/** Opcode 0xd9 !11/4
 * Loads the FPU environment (14 or 28 bytes depending on operand size)
 * from memory; the heavy lifting is done in the C implementation. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
9193
9194
9195/** Opcode 0xd9 !11/5 */
9196FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
9197{
9198 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
9199 IEM_MC_BEGIN(1, 1);
9200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9201 IEM_MC_ARG(uint16_t, u16Fsw, 0);
9202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9204 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9205 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9206 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9207 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, iemCImpl_fldcw, u16Fsw);
9208 IEM_MC_END();
9209}
9210
9211
/** Opcode 0xd9 !11/6
 * Stores the FPU environment (14 or 28 bytes) to memory without checking
 * for pending exceptions (the no-wait FNSTENV form).
 * NOTE(review): the mnemonic stat name says "fstenv" while the function and
 * CIMPL are fnstenv — presumably intentional stat naming; confirm. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
9228
9229
/** Opcode 0xd9 !11/7
 * Stores the FPU control word to a 16-bit memory operand (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9246
9247
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP: does nothing but update FOP/FPUIP and raise the usual FPU checks. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9263
9264
/** Opcode 0xd9 11/0 stN
 * Pushes a copy of ST(i) onto the FPU stack. Empty source -> push underflow. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        /* Wrap the source value as an FPU result so the push helper can store it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9289
9290
/** Opcode 0xd9 11/3 stN
 * Exchanges ST0 and ST(i). If either register is empty, the underflow
 * handling is delegated to iemCImpl_fxch_underflow. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(2, 3);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        /* ST(i)'s old value goes to ST0 (via FpuRes, with C1 set), ST0's old value to ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9319
9320
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * Copies ST0 to ST(i) and pops. The iDstReg == 0 case is special-cased:
 * it only needs to pop (effectively an 'ffreep st0'). */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            /* No data movement needed; just pop with a zero FSW delta. */
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9367
9368
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * If ST0 is empty a stack underflow targeting ST0 is recorded instead.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9396
9397
/** Opcode 0xd9 0xe0. FCHS: negate ST0's sign bit. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1. FABS: clear ST0's sign bit. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
9412
9413
/** Opcode 0xd9 0xe4.
 * FTST: compares ST0 against +0.0, updating only FSW condition codes. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9437
9438
/** Opcode 0xd9 0xe5.
 * FXAM: classifies ST0 into the C0/C2/C3 condition codes. Unlike most x87
 * operations it is defined for an empty ST0 too, hence the unconditional
 * IEM_MC_REF_FPUREG instead of a not-empty check. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9459
9460
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * The incoming top-of-stack slot (relative ST7) must be empty, otherwise a
 * push overflow is recorded instead.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEM_MC_BEGIN(1, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9486
9487
/*
 * 0xd9 0xe8-0xee: push-constant instructions, all via iemOpHlpFpuPushConstant.
 */

/** Opcode 0xd9 0xe8. FLD1: push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9. FLDL2T: push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea. FLDL2E: push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb. FLDPI: push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec. FLDLG2: push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed. FLDLN2: push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee. FLDZ: push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
9540
9541
/** Opcode 0xd9 0xf0.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
9555
9556
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * @param bRm Mod R/M byte.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Note: STn is the first operand (destination), ST0 the second. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9587
9588
/** Opcode 0xd9 0xf1.
 * FYL2X: ST1 = ST1 * log2(ST0), then pop (fixed stN index of 1). */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
9595
9596
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * Used by fptan/fsincos/fxtract. Empty ST0 -> push-underflow with two results.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9624
9625
/** Opcode 0xd9 0xf2. FPTAN: replaces ST0 with tan(ST0) and pushes 1.0. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3. FPATAN: ST1 = atan(ST1/ST0), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4. FXTRACT: splits ST0 into exponent (ST1) and significand (ST0). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5. FPREM1: IEEE partial remainder of ST0/ST1 into ST0. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
9656
9657
/** Opcode 0xd9 0xf6.
 * FDECSTP: decrements the FPU top-of-stack pointer (no tag changes). */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9678
9679
/** Opcode 0xd9 0xf7.
 * FINCSTP: increments the FPU top-of-stack pointer (no tag changes). */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9700
9701
/** Opcode 0xd9 0xf8. FPREM: 8087-style partial remainder of ST0/ST1 into ST0. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}


/** Opcode 0xd9 0xf9. FYL2XP1: ST1 = ST1 * log2(ST0 + 1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}


/** Opcode 0xd9 0xfa. FSQRT: ST0 = sqrt(ST0). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}


/** Opcode 0xd9 0xfb. FSINCOS: ST0 = sin(ST0), pushes cos(old ST0). */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}


/** Opcode 0xd9 0xfc. FRNDINT: rounds ST0 to integer per the RC field. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}


/** Opcode 0xd9 0xfd. FSCALE: ST0 = ST0 * 2^trunc(ST1). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}


/** Opcode 0xd9 0xfe. FSIN: ST0 = sin(ST0). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}


/** Opcode 0xd9 0xff. FCOS: ST0 = cos(ST0). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
9764
9765
/** Used by iemOp_EscF1.
 * Dispatch table for 0xd9 with mod=11 and reg=4..7; indexed by bRm - 0xe0. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
9802
9803
/**
 * @opcode 0xd9
 *
 * Second x87 escape opcode. Register forms with reg=4..7 go through the
 * g_apfnEscF1_E0toFF table; memory forms cover fld/fst/fstp m32r and the
 * environment/control-word instructions.
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 opcode bits + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                /* Only 0xd0 (FNOP) is defined in the /2 register range. */
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9848
9849
/** Opcode 0xda 11/0.
 * FCMOVB ST(0),ST(i): copy ST(i) into ST(0) when CF is set (below). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty; otherwise a stack underflow is signalled. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9874
9875
/** Opcode 0xda 11/1.
 * FCMOVE ST(0),ST(i): copy ST(i) into ST(0) when ZF is set (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty; otherwise a stack underflow is signalled. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9900
9901
/** Opcode 0xda 11/2.
 * FCMOVBE ST(0),ST(i): copy ST(i) into ST(0) when CF or ZF is set (below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty; otherwise a stack underflow is signalled. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9926
9927
/** Opcode 0xda 11/3.
 * FCMOVU ST(0),ST(i): copy ST(i) into ST(0) when PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty; otherwise a stack underflow is signalled. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9952
9953
9954/**
9955 * Common worker for FPU instructions working on ST0 and ST1, only affecting
9956 * flags, and popping twice when done.
9957 *
9958 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9959 */
9960FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
9961{
9962 IEM_MC_BEGIN(3, 1);
9963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9964 IEM_MC_LOCAL(uint16_t, u16Fsw);
9965 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9966 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9967 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9968
9969 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9970 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9971
9972 IEM_MC_PREPARE_FPU_USAGE();
9973 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
9974 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
9975 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
9976 } IEM_MC_ELSE() {
9977 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
9978 } IEM_MC_ENDIF();
9979 IEM_MC_ADVANCE_RIP_AND_FINISH();
9980
9981 IEM_MC_END();
9982}
9983
9984
/** Opcode 0xda 0xe9.
 * FUCOMPP: unordered compare ST(0) with ST(1), set FSW condition codes, pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
9991
9992
9993/**
9994 * Common worker for FPU instructions working on ST0 and an m32i, and storing
9995 * the result in ST0.
9996 *
9997 * @param bRm Mod R/M byte.
9998 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9999 */
10000FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
10001{
10002 IEM_MC_BEGIN(3, 3);
10003 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10004 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10005 IEM_MC_LOCAL(int32_t, i32Val2);
10006 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10007 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10008 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
10009
10010 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10012
10013 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10014 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10015 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10016
10017 IEM_MC_PREPARE_FPU_USAGE();
10018 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10019 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
10020 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10021 } IEM_MC_ELSE() {
10022 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10023 } IEM_MC_ENDIF();
10024 IEM_MC_ADVANCE_RIP_AND_FINISH();
10025
10026 IEM_MC_END();
10027}
10028
10029
/** Opcode 0xda !11/0.
 * FIADD m32i: ST(0) = ST(0) + (signed 32-bit integer memory operand). */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}


/** Opcode 0xda !11/1.
 * FIMUL m32i: ST(0) = ST(0) * (signed 32-bit integer memory operand). */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
10044
10045
/** Opcode 0xda !11/2.
 * FICOM m32i: compare ST(0) with a signed 32-bit integer memory operand,
 * updating only the FSW condition codes (no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no destination register involved in the underflow reporting. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10077
10078
/** Opcode 0xda !11/3.
 * FICOMP m32i: like FICOM m32i (same assembly worker), but pops ST(0) when done. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no destination register involved in the underflow reporting. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10110
10111
/** Opcode 0xda !11/4.
 * FISUB m32i: ST(0) = ST(0) - m32i. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}


/** Opcode 0xda !11/5.
 * FISUBR m32i: ST(0) = m32i - ST(0) (reversed operands). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}


/** Opcode 0xda !11/6.
 * FIDIV m32i: ST(0) = ST(0) / m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}


/** Opcode 0xda !11/7.
 * FIDIVR m32i: ST(0) = m32i / ST(0) (reversed operands). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
10142
10143
10144/**
10145 * @opcode 0xda
10146 */
10147FNIEMOP_DEF(iemOp_EscF2)
10148{
10149 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10150 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
10151 if (IEM_IS_MODRM_REG_MODE(bRm))
10152 {
10153 switch (IEM_GET_MODRM_REG_8(bRm))
10154 {
10155 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
10156 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
10157 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
10158 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
10159 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
10160 case 5:
10161 if (bRm == 0xe9)
10162 return FNIEMOP_CALL(iemOp_fucompp);
10163 IEMOP_RAISE_INVALID_OPCODE_RET();
10164 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10165 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
10166 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10167 }
10168 }
10169 else
10170 {
10171 switch (IEM_GET_MODRM_REG_8(bRm))
10172 {
10173 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
10174 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
10175 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
10176 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
10177 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
10178 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
10179 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
10180 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
10181 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10182 }
10183 }
10184}
10185
10186
/** Opcode 0xdb !11/0.
 * FILD m32i: convert a signed 32-bit integer memory operand to R80 and push it
 * onto the FPU stack; push overflow is signalled if ST(7) is occupied. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires ST(7) (the register below the current top) to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10217
10218
/** Opcode 0xdb !11/1.
 * FISTTP m32i: store ST(0) to memory as a signed 32-bit integer using
 * truncation (ignores RC), then pop. If ST(0) is empty and IM is unmasked
 * handling is skipped; with IM masked the integer indefinite (INT32_MIN) is
 * stored instead.
 * NOTE(review): still uses the legacy IEM_MC_MEM_MAP/UNMAP macros; conversion
 * to the newer IEM_MC_MEM_MAP_XXX style appears pending (bugref:10369) -
 * confirm before touching. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before the FPU work so memory faults precede FPU faults. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit is conditional on the FSW result (masked exceptions only). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10252
10253
/** Opcode 0xdb !11/2.
 * FIST m32i: store ST(0) to memory as a signed 32-bit integer using the
 * current rounding mode; no pop. With ST(0) empty and IM masked, the integer
 * indefinite (INT32_MIN) is stored.
 * NOTE(review): legacy IEM_MC_MEM_MAP usage; see fisttp_m32i note. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10287
10288
/** Opcode 0xdb !11/3.
 * FISTP m32i: like FIST m32i (same assembly worker), but pops ST(0) when done.
 * NOTE(review): legacy IEM_MC_MEM_MAP usage; see fisttp_m32i note. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10322
10323
/** Opcode 0xdb !11/5.
 * FLD m80r: load an 80-bit real from memory and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires ST(7) (the register below the current top) to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10354
10355
/** Opcode 0xdb !11/7.
 * FSTP m80r: store ST(0) to memory as an 80-bit real and pop. With ST(0)
 * empty and IM masked, a negative QNaN (real indefinite) is stored instead. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Explicit-size mapping: 10-byte operand with an alignment spec of 7. */
    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10389
10390
/** Opcode 0xdb 11/0.
 * FCMOVNB ST(0),ST(i): copy ST(i) into ST(0) when CF is clear (not below). */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty; otherwise a stack underflow is signalled. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10415
10416
/** Opcode 0xdb 11/1.
 * FCMOVNE ST(0),ST(i): copy ST(i) into ST(0) when ZF is clear (not equal). */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty; otherwise a stack underflow is signalled. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10441
10442
/** Opcode 0xdb 11/2.
 * FCMOVNBE ST(0),ST(i): copy ST(i) into ST(0) when both CF and ZF are clear
 * (not below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty; otherwise a stack underflow is signalled. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10467
10468
/** Opcode 0xdb 11/3.
 * FCMOVNU ST(0),ST(i): copy ST(i) into ST(0) when PF is clear (not unordered).
 * NOTE(review): the local identifier uses a double 'n' ("fcmovnnu") while the
 * Intel mnemonic is FCMOVNU - looks like a naming quirk, confirm before renaming. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty; otherwise a stack underflow is signalled. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10493
10494
/** Opcode 0xdb 0xe0.
 * FNENI: 8087-only interrupt-enable instruction; a no-op on later CPUs
 * (only the DNA check remains). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0xdb 0xe1.
 * FNDISI: 8087-only interrupt-disable instruction; a no-op on later CPUs. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10517
10518
/** Opcode 0xdb 0xe2.
 * FNCLEX: clear the FPU exception flags in FSW without checking for pending
 * unmasked exceptions (no-wait form). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10531
10532
/** Opcode 0xdb 0xe3.
 * FNINIT: reinitialize the FPU; deferred to the C implementation with
 * fCheckXcpts=false (no-wait form, pending exceptions not checked). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, iemCImpl_finit, false /*fCheckXcpts*/);
}
10540
10541
/** Opcode 0xdb 0xe4.
 * FNSETPM: 80287-only "set protected mode"; ignored (no-op) on later CPUs. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0xdb 0xe5.
 * FRSTPM: 80287XL-only "reset protected mode"; raises \#UD here since newer
 * CPUs do so (the ignore-it variant is kept under #if 0 for reference). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
10569
10570
/** Opcode 0xdb 11/5.
 * FUCOMI ST(0),ST(i): unordered compare setting EFLAGS (ZF/PF/CF); no pop.
 * The fPop flag (0) is OR'ed into the same argument as the FPU opcode word. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fucomi_r80_by_r80,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
10579
10580
10581/** Opcode 0xdb 11/6. */
10582FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
10583{
10584 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
10585 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
10586 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
10587 false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
10588}
10589
10590
10591/**
10592 * @opcode 0xdb
10593 */
10594FNIEMOP_DEF(iemOp_EscF3)
10595{
10596 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10597 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
10598 if (IEM_IS_MODRM_REG_MODE(bRm))
10599 {
10600 switch (IEM_GET_MODRM_REG_8(bRm))
10601 {
10602 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
10603 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
10604 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
10605 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
10606 case 4:
10607 switch (bRm)
10608 {
10609 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
10610 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
10611 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
10612 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
10613 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
10614 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
10615 case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
10616 case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
10617 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10618 }
10619 break;
10620 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
10621 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
10622 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
10623 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10624 }
10625 }
10626 else
10627 {
10628 switch (IEM_GET_MODRM_REG_8(bRm))
10629 {
10630 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
10631 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
10632 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
10633 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
10634 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
10635 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
10636 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10637 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
10638 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10639 }
10640 }
10641}
10642
10643
10644/**
10645 * Common worker for FPU instructions working on STn and ST0, and storing the
10646 * result in STn unless IE, DE or ZE was raised.
10647 *
10648 * @param bRm Mod R/M byte.
10649 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10650 */
10651FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
10652{
10653 IEM_MC_BEGIN(3, 1);
10654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10655 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10656 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10657 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10658 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10659
10660 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10661 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10662
10663 IEM_MC_PREPARE_FPU_USAGE();
10664 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
10665 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
10666 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10667 } IEM_MC_ELSE() {
10668 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10669 } IEM_MC_ENDIF();
10670 IEM_MC_ADVANCE_RIP_AND_FINISH();
10671
10672 IEM_MC_END();
10673}
10674
10675
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST(0): ST(i) = ST(i) + ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST(0): ST(i) = ST(i) * ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST(0): reversed subtract (worker operand order handles this). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST(0): ST(i) = ST(i) - ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST(0): reversed divide (worker operand order handles this). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST(0): ST(i) = ST(i) / ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
10722
10723
10724/**
10725 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
10726 * memory operand, and storing the result in ST0.
10727 *
10728 * @param bRm Mod R/M byte.
10729 * @param pfnImpl Pointer to the instruction implementation (assembly).
10730 */
10731FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
10732{
10733 IEM_MC_BEGIN(3, 3);
10734 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10735 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10736 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
10737 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10738 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
10739 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
10740
10741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10742 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10743 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10744 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10745
10746 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10747 IEM_MC_PREPARE_FPU_USAGE();
10748 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
10749 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
10750 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10751 } IEM_MC_ELSE() {
10752 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10753 } IEM_MC_ENDIF();
10754 IEM_MC_ADVANCE_RIP_AND_FINISH();
10755
10756 IEM_MC_END();
10757}
10758
10759
/** Opcode 0xdc !11/0. FADD ST0,m64r - defers to the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
10766
10767
/** Opcode 0xdc !11/1. FMUL ST0,m64r - defers to the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
10774
10775
/** Opcode 0xdc !11/2. FCOM ST0,m64r - compare ST0 with a 64-bit memory
 *  operand, updating only FSW (no value stored). */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    /* Decode the effective address and finish decoding before raising exceptions. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty -> stack underflow handling. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10807
10808
/** Opcode 0xdc !11/3. FCOMP ST0,m64r - same as FCOM m64r but pops ST0
 *  afterwards (the *_THEN_POP FSW updaters). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10840
10841
/** Opcode 0xdc !11/4. FSUB ST0,m64r - defers to the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
10848
10849
/** Opcode 0xdc !11/5. FSUBR ST0,m64r - defers to the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
10856
10857
/** Opcode 0xdc !11/6. FDIV ST0,m64r - defers to the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
10864
10865
/** Opcode 0xdc !11/7. FDIVR ST0,m64r - defers to the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
10872
10873
10874/**
10875 * @opcode 0xdc
10876 */
10877FNIEMOP_DEF(iemOp_EscF4)
10878{
10879 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10880 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
10881 if (IEM_IS_MODRM_REG_MODE(bRm))
10882 {
10883 switch (IEM_GET_MODRM_REG_8(bRm))
10884 {
10885 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
10886 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
10887 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
10888 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
10889 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
10890 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
10891 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
10892 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
10893 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10894 }
10895 }
10896 else
10897 {
10898 switch (IEM_GET_MODRM_REG_8(bRm))
10899 {
10900 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
10901 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
10902 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
10903 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
10904 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
10905 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
10906 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
10907 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
10908 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10909 }
10910 }
10911}
10912
10913
/** Opcode 0xdd !11/0. FLD m64r - push a 64-bit real onto the FPU stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* The push requires the register that will become the new top, ST(7), to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10944
10945
/** Opcode 0xdd !11/1 (see iemOp_EscF5). FISTTP m64i - store ST0 as a
 *  truncated 64-bit integer and pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with the invalid-operation exception masked,
           store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10979
10980
/** Opcode 0xdd !11/2 (see iemOp_EscF5). FST m64r - store ST0 as a 64-bit
 *  real without popping. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with the invalid-operation exception masked,
           store the negative QNaN indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11014
11015
11016
11017
/** Opcode 0xdd !11/3 (see iemOp_EscF5). FSTP m64r - store ST0 as a 64-bit
 *  real and pop. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with the invalid-operation exception masked,
           store the negative QNaN indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11051
11052
/** Opcode 0xdd !11/4 (see iemOp_EscF5). FRSTOR - restore the FPU state from
 *  a 94/108-byte memory image; heavy lifting is deferred to iemCImpl_frstor. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
11069
11070
/** Opcode 0xdd !11/6 (see iemOp_EscF5). FNSAVE - save the FPU state to a
 *  94/108-byte memory image; heavy lifting is deferred to iemCImpl_fnsave. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
11087
/** Opcode 0xdd !11/7 (see iemOp_EscF5). FNSTSW m16 - store the FPU status
 *  word to memory without checking for pending FPU exceptions. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
11111
11112
/** Opcode 0xdd 11/0. FFREE ST(i) - mark the given stack register as empty. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
       unmodified. */
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11132
11133
/** Opcode 0xdd 11/2 (see iemOp_EscF5). FST ST(i) - copy ST0 into ST(i). */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Wrap the ST0 value in a result with a zero FSW delta and store it in ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11156
11157
/** Opcode 0xdd 11/4 (see iemOp_EscF5). FUCOM ST0,ST(i) - unordered compare, no store. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
11164
11165
/** Opcode 0xdd 11/5 (see iemOp_EscF5). FUCOMP ST0,ST(i) - unordered compare, then pop. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
11172
11173
11174/**
11175 * @opcode 0xdd
11176 */
11177FNIEMOP_DEF(iemOp_EscF5)
11178{
11179 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11180 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
11181 if (IEM_IS_MODRM_REG_MODE(bRm))
11182 {
11183 switch (IEM_GET_MODRM_REG_8(bRm))
11184 {
11185 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
11186 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
11187 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
11188 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
11189 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
11190 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
11191 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11192 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11193 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11194 }
11195 }
11196 else
11197 {
11198 switch (IEM_GET_MODRM_REG_8(bRm))
11199 {
11200 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
11201 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
11202 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
11203 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
11204 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
11205 case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
11206 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
11207 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
11208 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11209 }
11210 }
11211}
11212
11213
/** Opcode 0xde 11/0. FADDP ST(i),ST0 - add and pop, via the common worker. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
11220
11221
/** Opcode 0xde 11/1 (see iemOp_EscF6). FMULP ST(i),ST0 - multiply and pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
11228
11229
/** Opcode 0xde 0xd9. FCOMPP - compare ST0 with ST1 and pop both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
11236
11237
/** Opcode 0xde 11/4. FSUBRP ST(i),ST0 - reverse subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
11244
11245
/** Opcode 0xde 11/5. FSUBP ST(i),ST0 - subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
11252
11253
/** Opcode 0xde 11/6. FDIVRP ST(i),ST0 - reverse divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
11260
11261
/** Opcode 0xde 11/7. FDIVP ST(i),ST0 - divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
11268
11269
11270/**
11271 * Common worker for FPU instructions working on ST0 and an m16i, and storing
11272 * the result in ST0.
11273 *
11274 * @param bRm Mod R/M byte.
11275 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11276 */
11277FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
11278{
11279 IEM_MC_BEGIN(3, 3);
11280 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11281 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11282 IEM_MC_LOCAL(int16_t, i16Val2);
11283 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11284 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11285 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
11286
11287 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11289
11290 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11291 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11292 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11293
11294 IEM_MC_PREPARE_FPU_USAGE();
11295 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11296 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
11297 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
11298 } IEM_MC_ELSE() {
11299 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11300 } IEM_MC_ENDIF();
11301 IEM_MC_ADVANCE_RIP_AND_FINISH();
11302
11303 IEM_MC_END();
11304}
11305
11306
/** Opcode 0xde !11/0. FIADD m16i - defers to the common ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
11313
11314
/** Opcode 0xde !11/1. FIMUL m16i - defers to the common ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
11321
11322
/** Opcode 0xde !11/2. FICOM ST0,m16i - compare ST0 with a 16-bit integer
 *  memory operand, updating only FSW. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11354
11355
/** Opcode 0xde !11/3. FICOMP ST0,m16i - same as FICOM m16i but pops ST0
 *  afterwards (the *_THEN_POP FSW updaters). */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11387
11388
/** Opcode 0xde !11/4. FISUB m16i - defers to the common ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
11395
11396
/** Opcode 0xde !11/5. FISUBR m16i - defers to the common ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
11403
11404
/** Opcode 0xde !11/6. FIDIV m16i - defers to the common ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
11411
11412
/** Opcode 0xde !11/7. FIDIVR m16i - defers to the common ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
11419
11420
11421/**
11422 * @opcode 0xde
11423 */
11424FNIEMOP_DEF(iemOp_EscF6)
11425{
11426 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11427 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
11428 if (IEM_IS_MODRM_REG_MODE(bRm))
11429 {
11430 switch (IEM_GET_MODRM_REG_8(bRm))
11431 {
11432 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
11433 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
11434 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
11435 case 3: if (bRm == 0xd9)
11436 return FNIEMOP_CALL(iemOp_fcompp);
11437 IEMOP_RAISE_INVALID_OPCODE_RET();
11438 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
11439 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
11440 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
11441 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
11442 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11443 }
11444 }
11445 else
11446 {
11447 switch (IEM_GET_MODRM_REG_8(bRm))
11448 {
11449 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
11450 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
11451 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
11452 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
11453 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
11454 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
11455 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
11456 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
11457 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11458 }
11459 }
11460}
11461
11462
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Free ST(i), then increment TOP - effectively popping the stack. */
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11482
11483
/** Opcode 0xdf 0xe0. FNSTSW AX - store the FPU status word into AX without
 *  checking for pending FPU exceptions. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11498
11499
11500/** Opcode 0xdf 11/5. */
11501FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
11502{
11503 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
11504 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
11505 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
11506 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
11507}
11508
11509
/** Opcode 0xdf 11/6. FCOMIP ST0,ST(i) - ordered compare setting EFLAGS,
 *  then pop (bit 31 of the last argument requests the pop). */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11518
11519
/** Opcode 0xdf !11/0. FILD m16i - push a 16-bit integer onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push requires the register that will become the new top, ST(7), to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11550
11551
/** Opcode 0xdf !11/1. FISTTP m16i - store ST0 as a truncated 16-bit integer
 *  and pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with the invalid-operation exception masked,
           store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11585
11586
/** Opcode 0xdf !11/2. FIST m16i - store ST0 as a (rounded) 16-bit integer
 *  without popping. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with the invalid-operation exception masked,
           store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11620
11621
/** Opcode 0xdf !11/3.
 *
 * FISTP m16i: Store ST(0) to memory as a signed 16-bit integer, then pop the
 * stack.  Identical to FIST m16i except for the _THEN_POP FSW/underflow
 * variants.  On masked underflow the integer indefinite (INT16_MIN) is stored.
 */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,                 pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before touching FPU state so memory faults are raised first. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: store integer indefinite only if IM is masked; pop regardless. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11655
11656
/** Opcode 0xdf !11/4.
 *
 * FBLD m80bcd: Load an 80-bit packed BCD value from memory, convert it to
 * extended precision and push it onto the FPU stack.  If ST(7) (the incoming
 * top) is not free, a stack push overflow is signalled instead.
 */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,            FpuRes);
    IEM_MC_LOCAL(RTPBCD80U,               d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,   pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U,     pd80Val,    d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the BCD operand before modifying any FPU state. */
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11687
11688
/** Opcode 0xdf !11/5.
 *
 * FILD m64i: Load a signed 64-bit integer from memory, convert it to extended
 * precision and push it onto the FPU stack.  If ST(7) (the incoming top) is
 * not free, a stack push overflow is signalled instead.
 */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,            FpuRes);
    IEM_MC_LOCAL(int64_t,                 i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,   pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val,    i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the integer operand before modifying any FPU state. */
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11719
11720
/** Opcode 0xdf !11/6.
 *
 * FBSTP m80bcd: Store ST(0) to memory as 80-bit packed BCD, then pop the
 * stack.  On masked stack underflow (FCW.IM set) the BCD indefinite value is
 * stored instead.  Note the explicit MAP_EX with 7-byte alignment mask for
 * the 10-byte destination.
 */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTPBCD80U,                pd80Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before touching FPU state so memory faults are raised first. */
    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: store BCD indefinite only if IM is masked; pop regardless. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11754
11755
/** Opcode 0xdf !11/7.
 *
 * FISTP m64i: Store ST(0) to memory as a signed 64-bit integer, then pop the
 * stack.  On masked stack underflow the 64-bit integer indefinite value
 * (INT64_MIN) is stored instead.
 */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,                 pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before touching FPU state so memory faults are raised first. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: store integer indefinite only if IM is masked; pop regardless. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11789
11790
11791/**
11792 * @opcode 0xdf
11793 */
11794FNIEMOP_DEF(iemOp_EscF7)
11795{
11796 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11797 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
11798 if (IEM_IS_MODRM_REG_MODE(bRm))
11799 {
11800 switch (IEM_GET_MODRM_REG_8(bRm))
11801 {
11802 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
11803 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
11804 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
11805 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
11806 case 4: if (bRm == 0xe0)
11807 return FNIEMOP_CALL(iemOp_fnstsw_ax);
11808 IEMOP_RAISE_INVALID_OPCODE_RET();
11809 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
11810 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
11811 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11812 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11813 }
11814 }
11815 else
11816 {
11817 switch (IEM_GET_MODRM_REG_8(bRm))
11818 {
11819 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
11820 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
11821 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
11822 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
11823 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
11824 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
11825 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
11826 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
11827 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11828 }
11829 }
11830}
11831
11832
11833/**
11834 * @opcode 0xe0
11835 */
11836FNIEMOP_DEF(iemOp_loopne_Jb)
11837{
11838 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
11839 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
11840 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11841
11842 switch (pVCpu->iem.s.enmEffAddrMode)
11843 {
11844 case IEMMODE_16BIT:
11845 IEM_MC_BEGIN(0,0);
11846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11847 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
11848 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11849 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11850 } IEM_MC_ELSE() {
11851 IEM_MC_ADVANCE_RIP_AND_FINISH();
11852 } IEM_MC_ENDIF();
11853 IEM_MC_END();
11854 break;
11855
11856 case IEMMODE_32BIT:
11857 IEM_MC_BEGIN(0,0);
11858 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11859 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
11860 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11861 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11862 } IEM_MC_ELSE() {
11863 IEM_MC_ADVANCE_RIP_AND_FINISH();
11864 } IEM_MC_ENDIF();
11865 IEM_MC_END();
11866 break;
11867
11868 case IEMMODE_64BIT:
11869 IEM_MC_BEGIN(0,0);
11870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11871 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
11872 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11873 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11874 } IEM_MC_ELSE() {
11875 IEM_MC_ADVANCE_RIP_AND_FINISH();
11876 } IEM_MC_ENDIF();
11877 IEM_MC_END();
11878 break;
11879
11880 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11881 }
11882}
11883
11884
11885/**
11886 * @opcode 0xe1
11887 */
11888FNIEMOP_DEF(iemOp_loope_Jb)
11889{
11890 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
11891 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
11892 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11893
11894 switch (pVCpu->iem.s.enmEffAddrMode)
11895 {
11896 case IEMMODE_16BIT:
11897 IEM_MC_BEGIN(0,0);
11898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11899 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
11900 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
11901 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11902 } IEM_MC_ELSE() {
11903 IEM_MC_ADVANCE_RIP_AND_FINISH();
11904 } IEM_MC_ENDIF();
11905 IEM_MC_END();
11906 break;
11907
11908 case IEMMODE_32BIT:
11909 IEM_MC_BEGIN(0,0);
11910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11911 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
11912 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
11913 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11914 } IEM_MC_ELSE() {
11915 IEM_MC_ADVANCE_RIP_AND_FINISH();
11916 } IEM_MC_ENDIF();
11917 IEM_MC_END();
11918 break;
11919
11920 case IEMMODE_64BIT:
11921 IEM_MC_BEGIN(0,0);
11922 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11923 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
11924 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
11925 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11926 } IEM_MC_ELSE() {
11927 IEM_MC_ADVANCE_RIP_AND_FINISH();
11928 } IEM_MC_ENDIF();
11929 IEM_MC_END();
11930 break;
11931
11932 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11933 }
11934}
11935
11936
11937/**
11938 * @opcode 0xe2
11939 */
11940FNIEMOP_DEF(iemOp_loop_Jb)
11941{
11942 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
11943 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
11944 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11945
11946 /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
11947 * using the 32-bit operand size override. How can that be restarted? See
11948 * weird pseudo code in intel manual. */
11949
11950 /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
11951 * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
11952 * the loop causes guest crashes, but when logging it's nice to skip a few million
11953 * lines of useless output. */
11954#if defined(LOG_ENABLED)
11955 if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
11956 switch (pVCpu->iem.s.enmEffAddrMode)
11957 {
11958 case IEMMODE_16BIT:
11959 IEM_MC_BEGIN(0,0);
11960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11961 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
11962 IEM_MC_ADVANCE_RIP_AND_FINISH();
11963 IEM_MC_END();
11964 break;
11965
11966 case IEMMODE_32BIT:
11967 IEM_MC_BEGIN(0,0);
11968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11969 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
11970 IEM_MC_ADVANCE_RIP_AND_FINISH();
11971 IEM_MC_END();
11972 break;
11973
11974 case IEMMODE_64BIT:
11975 IEM_MC_BEGIN(0,0);
11976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11977 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
11978 IEM_MC_ADVANCE_RIP_AND_FINISH();
11979 IEM_MC_END();
11980 break;
11981
11982 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11983 }
11984#endif
11985
11986 switch (pVCpu->iem.s.enmEffAddrMode)
11987 {
11988 case IEMMODE_16BIT:
11989 IEM_MC_BEGIN(0,0);
11990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11991 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
11992 IEM_MC_IF_CX_IS_NZ() {
11993 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11994 } IEM_MC_ELSE() {
11995 IEM_MC_ADVANCE_RIP_AND_FINISH();
11996 } IEM_MC_ENDIF();
11997 IEM_MC_END();
11998 break;
11999
12000 case IEMMODE_32BIT:
12001 IEM_MC_BEGIN(0,0);
12002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12003 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
12004 IEM_MC_IF_ECX_IS_NZ() {
12005 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12006 } IEM_MC_ELSE() {
12007 IEM_MC_ADVANCE_RIP_AND_FINISH();
12008 } IEM_MC_ENDIF();
12009 IEM_MC_END();
12010 break;
12011
12012 case IEMMODE_64BIT:
12013 IEM_MC_BEGIN(0,0);
12014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12015 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
12016 IEM_MC_IF_RCX_IS_NZ() {
12017 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12018 } IEM_MC_ELSE() {
12019 IEM_MC_ADVANCE_RIP_AND_FINISH();
12020 } IEM_MC_ENDIF();
12021 IEM_MC_END();
12022 break;
12023
12024 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12025 }
12026}
12027
12028
12029/**
12030 * @opcode 0xe3
12031 */
12032FNIEMOP_DEF(iemOp_jecxz_Jb)
12033{
12034 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
12035 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12036 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12037
12038 switch (pVCpu->iem.s.enmEffAddrMode)
12039 {
12040 case IEMMODE_16BIT:
12041 IEM_MC_BEGIN(0,0);
12042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12043 IEM_MC_IF_CX_IS_NZ() {
12044 IEM_MC_ADVANCE_RIP_AND_FINISH();
12045 } IEM_MC_ELSE() {
12046 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12047 } IEM_MC_ENDIF();
12048 IEM_MC_END();
12049 break;
12050
12051 case IEMMODE_32BIT:
12052 IEM_MC_BEGIN(0,0);
12053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12054 IEM_MC_IF_ECX_IS_NZ() {
12055 IEM_MC_ADVANCE_RIP_AND_FINISH();
12056 } IEM_MC_ELSE() {
12057 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12058 } IEM_MC_ENDIF();
12059 IEM_MC_END();
12060 break;
12061
12062 case IEMMODE_64BIT:
12063 IEM_MC_BEGIN(0,0);
12064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12065 IEM_MC_IF_RCX_IS_NZ() {
12066 IEM_MC_ADVANCE_RIP_AND_FINISH();
12067 } IEM_MC_ELSE() {
12068 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12069 } IEM_MC_ENDIF();
12070 IEM_MC_END();
12071 break;
12072
12073 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12074 }
12075}
12076
12077
/** Opcode 0xe4.
 * IN AL,Ib: Read one byte from the immediate-addressed I/O port into AL.
 * Deferred to the C implementation (may VM-exit / trigger I/O intercepts). */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12087
12088
/** Opcode 0xe5.
 * IN eAX,Ib: Read a word or dword (per effective operand size) from the
 * immediate-addressed I/O port into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12099
12100
/** Opcode 0xe6.
 * OUT Ib,AL: Write AL to the immediate-addressed I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12110
12111
/** Opcode 0xe7.
 * OUT Ib,eAX: Write AX/EAX (per effective operand size) to the
 * immediate-addressed I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12122
12123
12124/**
12125 * @opcode 0xe8
12126 */
12127FNIEMOP_DEF(iemOp_call_Jv)
12128{
12129 IEMOP_MNEMONIC(call_Jv, "call Jv");
12130 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12131 switch (pVCpu->iem.s.enmEffOpSize)
12132 {
12133 case IEMMODE_16BIT:
12134 {
12135 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12136 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_16, (int16_t)u16Imm);
12137 }
12138
12139 case IEMMODE_32BIT:
12140 {
12141 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12142 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_32, (int32_t)u32Imm);
12143 }
12144
12145 case IEMMODE_64BIT:
12146 {
12147 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
12148 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_64, u64Imm);
12149 }
12150
12151 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12152 }
12153}
12154
12155
12156/**
12157 * @opcode 0xe9
12158 */
12159FNIEMOP_DEF(iemOp_jmp_Jv)
12160{
12161 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
12162 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12163 switch (pVCpu->iem.s.enmEffOpSize)
12164 {
12165 case IEMMODE_16BIT:
12166 {
12167 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
12168 IEM_MC_BEGIN(0, 0);
12169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12170 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
12171 IEM_MC_END();
12172 break;
12173 }
12174
12175 case IEMMODE_64BIT:
12176 case IEMMODE_32BIT:
12177 {
12178 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
12179 IEM_MC_BEGIN(0, 0);
12180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12181 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
12182 IEM_MC_END();
12183 break;
12184 }
12185
12186 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12187 }
12188}
12189
12190
12191/**
12192 * @opcode 0xea
12193 */
12194FNIEMOP_DEF(iemOp_jmp_Ap)
12195{
12196 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
12197 IEMOP_HLP_NO_64BIT();
12198
12199 /* Decode the far pointer address and pass it on to the far call C implementation. */
12200 uint32_t off32Seg;
12201 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
12202 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
12203 else
12204 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
12205 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
12206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12207 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
12208 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
12209 iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
12210}
12211
12212
12213/**
12214 * @opcode 0xeb
12215 */
12216FNIEMOP_DEF(iemOp_jmp_Jb)
12217{
12218 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
12219 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12220 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12221
12222 IEM_MC_BEGIN(0, 0);
12223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12224 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12225 IEM_MC_END();
12226}
12227
12228
/** Opcode 0xec.
 * IN AL,DX: Read one byte from the I/O port addressed by DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12237
12238
/** Opcode 0xed.
 * IN eAX,DX: Read a word or dword (per effective operand size) from the I/O
 * port addressed by DX into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12248
12249
/** Opcode 0xee.
 * OUT DX,AL: Write AL to the I/O port addressed by DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12258
12259
/** Opcode 0xef.
 * OUT DX,eAX: Write AX/EAX (per effective operand size) to the I/O port
 * addressed by DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12269
12270
12271/**
12272 * @opcode 0xf0
12273 */
12274FNIEMOP_DEF(iemOp_lock)
12275{
12276 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
12277 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12278 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
12279
12280 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12281 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12282}
12283
12284
12285/**
12286 * @opcode 0xf1
12287 */
12288FNIEMOP_DEF(iemOp_int1)
12289{
12290 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
12291 /** @todo Does not generate \#UD on 286, or so they say... Was allegedly a
12292 * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
12293 * LOADALL memo. Needs some testing. */
12294 IEMOP_HLP_MIN_386();
12295 /** @todo testcase! */
12296 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
12297 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
12298 iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
12299}
12300
12301
12302/**
12303 * @opcode 0xf2
12304 */
12305FNIEMOP_DEF(iemOp_repne)
12306{
12307 /* This overrides any previous REPE prefix. */
12308 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
12309 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
12310 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
12311
12312 /* For the 4 entry opcode tables, REPNZ overrides any previous
12313 REPZ and operand size prefixes. */
12314 pVCpu->iem.s.idxPrefix = 3;
12315
12316 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12317 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12318}
12319
12320
12321/**
12322 * @opcode 0xf3
12323 */
12324FNIEMOP_DEF(iemOp_repe)
12325{
12326 /* This overrides any previous REPNE prefix. */
12327 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
12328 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
12329 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
12330
12331 /* For the 4 entry opcode tables, REPNZ overrides any previous
12332 REPNZ and operand size prefixes. */
12333 pVCpu->iem.s.idxPrefix = 2;
12334
12335 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12336 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12337}
12338
12339
12340/**
12341 * @opcode 0xf4
12342 */
12343FNIEMOP_DEF(iemOp_hlt)
12344{
12345 IEMOP_MNEMONIC(hlt, "hlt");
12346 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12347 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, iemCImpl_hlt);
12348}
12349
12350
12351/**
12352 * @opcode 0xf5
12353 */
12354FNIEMOP_DEF(iemOp_cmc)
12355{
12356 IEMOP_MNEMONIC(cmc, "cmc");
12357 IEM_MC_BEGIN(0, 0);
12358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12359 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
12360 IEM_MC_ADVANCE_RIP_AND_FINISH();
12361 IEM_MC_END();
12362}
12363
12364
12365/**
12366 * Body for of 'inc/dec/not/neg Eb'.
12367 */
12368#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
12369 if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
12370 { \
12371 /* register access */ \
12372 IEM_MC_BEGIN(2, 0); \
12373 IEMOP_HLP_DONE_DECODING(); \
12374 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12375 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
12376 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
12377 IEM_MC_REF_EFLAGS(pEFlags); \
12378 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
12379 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12380 IEM_MC_END(); \
12381 } \
12382 else \
12383 { \
12384 /* memory access. */ \
12385 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
12386 { \
12387 IEM_MC_BEGIN(2, 2); \
12388 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12389 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
12390 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12391 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12392 \
12393 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
12394 IEMOP_HLP_DONE_DECODING(); \
12395 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12396 IEM_MC_FETCH_EFLAGS(EFlags); \
12397 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
12398 \
12399 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
12400 IEM_MC_COMMIT_EFLAGS(EFlags); \
12401 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12402 IEM_MC_END(); \
12403 } \
12404 else \
12405 { \
12406 IEM_MC_BEGIN(2, 2); \
12407 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12408 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
12409 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12410 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12411 \
12412 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
12413 IEMOP_HLP_DONE_DECODING(); \
12414 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12415 IEM_MC_FETCH_EFLAGS(EFlags); \
12416 IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
12417 \
12418 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
12419 IEM_MC_COMMIT_EFLAGS(EFlags); \
12420 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12421 IEM_MC_END(); \
12422 } \
12423 } \
12424 (void)0
12425
12426
12427/**
12428 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
12429 */
12430#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
12431 if (IEM_IS_MODRM_REG_MODE(bRm)) \
12432 { \
12433 /* \
12434 * Register target \
12435 */ \
12436 switch (pVCpu->iem.s.enmEffOpSize) \
12437 { \
12438 case IEMMODE_16BIT: \
12439 IEM_MC_BEGIN(2, 0); \
12440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12441 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
12442 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
12443 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
12444 IEM_MC_REF_EFLAGS(pEFlags); \
12445 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
12446 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12447 IEM_MC_END(); \
12448 break; \
12449 \
12450 case IEMMODE_32BIT: \
12451 IEM_MC_BEGIN(2, 0); \
12452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12453 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
12454 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
12455 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
12456 IEM_MC_REF_EFLAGS(pEFlags); \
12457 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
12458 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
12459 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12460 IEM_MC_END(); \
12461 break; \
12462 \
12463 case IEMMODE_64BIT: \
12464 IEM_MC_BEGIN(2, 0); \
12465 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12466 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
12467 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
12468 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
12469 IEM_MC_REF_EFLAGS(pEFlags); \
12470 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
12471 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12472 IEM_MC_END(); \
12473 break; \
12474 \
12475 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
12476 } \
12477 } \
12478 else \
12479 { \
12480 /* \
12481 * Memory target. \
12482 */ \
12483 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
12484 { \
12485 switch (pVCpu->iem.s.enmEffOpSize) \
12486 { \
12487 case IEMMODE_16BIT: \
12488 IEM_MC_BEGIN(2, 2); \
12489 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
12490 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
12491 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12492 \
12493 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12495 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
12496 IEM_MC_FETCH_EFLAGS(EFlags); \
12497 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
12498 \
12499 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
12500 IEM_MC_COMMIT_EFLAGS(EFlags); \
12501 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12502 IEM_MC_END(); \
12503 break; \
12504 \
12505 case IEMMODE_32BIT: \
12506 IEM_MC_BEGIN(2, 2); \
12507 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
12508 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
12509 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12510 \
12511 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12513 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
12514 IEM_MC_FETCH_EFLAGS(EFlags); \
12515 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
12516 \
12517 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
12518 IEM_MC_COMMIT_EFLAGS(EFlags); \
12519 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12520 IEM_MC_END(); \
12521 break; \
12522 \
12523 case IEMMODE_64BIT: \
12524 IEM_MC_BEGIN(2, 2); \
12525 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
12526 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
12527 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12528 \
12529 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12531 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
12532 IEM_MC_FETCH_EFLAGS(EFlags); \
12533 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
12534 \
12535 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
12536 IEM_MC_COMMIT_EFLAGS(EFlags); \
12537 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12538 IEM_MC_END(); \
12539 break; \
12540 \
12541 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
12542 } \
12543 } \
12544 else \
12545 { \
12546 (void)0
12547
/**
 * LOCK-prefixed memory-target continuation of IEMOP_BODY_UNARY_Ev.
 *
 * Supplies the locked memory forms for all three operand sizes and the
 * closing braces for the 'else {' block that IEMOP_BODY_UNARY_Ev leaves
 * open; the two macros must always be used as a pair.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
12610
12611
/**
 * @opmaps grp3_f6
 * @opcode /0
 * @todo also /1
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    /* TEST Eb,Ib.  Also dispatched for the undocumented /1 encoding, see
       iemOp_Grp3_Eb.  AF is architecturally undefined after TEST. */
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory access.  The destination is only read (TEST writes nothing
           back), so it is mapped read-only via the new-style map macros. */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t const *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 byte of immediate follows the ModR/M bytes. */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12661
12662
/**
 * Opcode 0xf6 /4, /5, /6 and /7 - common worker for the byte-sized
 * mul/imul/div/idiv instructions.
 *
 * @param   bRm     The ModR/M byte.
 * @param   pfnU8   The instruction worker; operates on AX (implicit
 *                  destination) and the 8-bit source.  A zero return means
 *                  success; non-zero makes us raise \#DE.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
12713
12714
/**
 * Opcode 0xf7 /4, /5, /6 and /7 - common worker for the word/dword/qword
 * sized mul/imul/div/idiv instructions.
 *
 * The implicit destination is the xDX:xAX register pair.  A zero return from
 * the worker means success; non-zero makes us raise \#DE.  On success in
 * 32-bit operand size, the high halves of RAX/RDX are explicitly cleared.
 *
 * @param   bRm     The ModR/M byte.
 * @param   pImpl   Operand-size indexed table of instruction workers.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* The worker only sees 32-bit references; clear the high
                       dwords of RAX/RDX by hand (64-bit mode semantics). */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* See the register path: clear the high dwords manually. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12897
12898
/**
 * @opmaps grp3_f6
 * @opcode /2
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
{
    /* NOT Eb - register, memory and LOCKed memory forms are all handled by
       the shared unary body macro. */
    IEMOP_MNEMONIC(not_Eb, "not Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
}
12908
12909
12910/**
12911 * @opmaps grp3_f6
12912 * @opcode /3
12913 */
12914FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
12915{
12916 IEMOP_MNEMONIC(net_Eb, "neg Eb");
12917 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
12918}
12919
12920
/**
 * @opcode 0xf6
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    /* Group 3 byte-sized dispatcher; the sub-opcode is in the reg field. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm); /* /1 treated as an alias of test, see the @todo in iemOp_grp3_test_Eb. */
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12952
12953
12954/** Opcode 0xf7 /0. */
12955FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
12956{
12957 IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
12958 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
12959
12960 if (IEM_IS_MODRM_REG_MODE(bRm))
12961 {
12962 /* register access */
12963 switch (pVCpu->iem.s.enmEffOpSize)
12964 {
12965 case IEMMODE_16BIT:
12966 {
12967 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12968 IEM_MC_BEGIN(3, 0);
12969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12970 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12971 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
12972 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12973 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12974 IEM_MC_REF_EFLAGS(pEFlags);
12975 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
12976 IEM_MC_ADVANCE_RIP_AND_FINISH();
12977 IEM_MC_END();
12978 break;
12979 }
12980
12981 case IEMMODE_32BIT:
12982 {
12983 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12984 IEM_MC_BEGIN(3, 0);
12985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12986 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12987 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
12988 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12989 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12990 IEM_MC_REF_EFLAGS(pEFlags);
12991 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
12992 /* No clearing the high dword here - test doesn't write back the result. */
12993 IEM_MC_ADVANCE_RIP_AND_FINISH();
12994 IEM_MC_END();
12995 break;
12996 }
12997
12998 case IEMMODE_64BIT:
12999 {
13000 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
13001 IEM_MC_BEGIN(3, 0);
13002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13003 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13004 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
13005 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13006 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
13007 IEM_MC_REF_EFLAGS(pEFlags);
13008 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
13009 IEM_MC_ADVANCE_RIP_AND_FINISH();
13010 IEM_MC_END();
13011 break;
13012 }
13013
13014 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13015 }
13016 }
13017 else
13018 {
13019 /* memory access. */
13020 switch (pVCpu->iem.s.enmEffOpSize)
13021 {
13022 case IEMMODE_16BIT:
13023 {
13024 IEM_MC_BEGIN(3, 2);
13025 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13026 IEM_MC_ARG(uint16_t, u16Src, 1);
13027 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
13028 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13029
13030 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
13031 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13032 IEM_MC_ASSIGN(u16Src, u16Imm);
13033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13034 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13035 IEM_MC_FETCH_EFLAGS(EFlags);
13036 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
13037
13038 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
13039 IEM_MC_COMMIT_EFLAGS(EFlags);
13040 IEM_MC_ADVANCE_RIP_AND_FINISH();
13041 IEM_MC_END();
13042 break;
13043 }
13044
13045 case IEMMODE_32BIT:
13046 {
13047 IEM_MC_BEGIN(3, 2);
13048 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13049 IEM_MC_ARG(uint32_t, u32Src, 1);
13050 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
13051 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13052
13053 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
13054 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13055 IEM_MC_ASSIGN(u32Src, u32Imm);
13056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13057 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13058 IEM_MC_FETCH_EFLAGS(EFlags);
13059 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
13060
13061 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
13062 IEM_MC_COMMIT_EFLAGS(EFlags);
13063 IEM_MC_ADVANCE_RIP_AND_FINISH();
13064 IEM_MC_END();
13065 break;
13066 }
13067
13068 case IEMMODE_64BIT:
13069 {
13070 IEM_MC_BEGIN(3, 2);
13071 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13072 IEM_MC_ARG(uint64_t, u64Src, 1);
13073 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
13074 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13075
13076 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
13077 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
13078 IEM_MC_ASSIGN(u64Src, u64Imm);
13079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13080 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13081 IEM_MC_FETCH_EFLAGS(EFlags);
13082 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
13083
13084 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
13085 IEM_MC_COMMIT_EFLAGS(EFlags);
13086 IEM_MC_ADVANCE_RIP_AND_FINISH();
13087 IEM_MC_END();
13088 break;
13089 }
13090
13091 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13092 }
13093 }
13094}
13095
13096
/** Opcode 0xf7 /2 - NOT Ev.  The two body macros supply the unlocked and
 *  LOCKed paths respectively and must be used as a pair. */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_not_u16, iemAImpl_not_u32, iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
13104
13105
/** Opcode 0xf7 /3 - NEG Ev.  The two body macros supply the unlocked and
 *  LOCKed paths respectively and must be used as a pair. */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_neg_u16, iemAImpl_neg_u32, iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
13113
13114
/**
 * @opcode 0xf7
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    /* Group 3 word/dword/qword dispatcher; the sub-opcode is in the reg field. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm); /* /1 treated as an alias of test, like for 0xf6. */
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13146
13147
/**
 * @opcode 0xf8
 */
FNIEMOP_DEF(iemOp_clc)
{
    /* CLC - clear the carry flag; no other flags or state touched. */
    IEMOP_MNEMONIC(clc, "clc");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13160
13161
/**
 * @opcode 0xf9
 */
FNIEMOP_DEF(iemOp_stc)
{
    /* STC - set the carry flag; no other flags or state touched. */
    IEMOP_MNEMONIC(stc, "stc");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13174
13175
/**
 * @opcode 0xfa
 */
FNIEMOP_DEF(iemOp_cli)
{
    /* CLI - deferred to a C implementation (privilege checks, VM-exits). */
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, iemCImpl_cli);
}
13185
13186
/**
 * @opcode 0xfb
 */
FNIEMOP_DEF(iemOp_sti)
{
    /* STI - deferred to a C implementation.  Note the IRQ check is done
       AFTER the instruction here (vs. BEFORE for cli), consistent with the
       one-instruction interrupt shadow of STI. */
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER | IEM_CIMPL_F_VMEXIT, iemCImpl_sti);
}
13193
13194
/**
 * @opcode 0xfc
 */
FNIEMOP_DEF(iemOp_cld)
{
    /* CLD - clear the direction flag; no other flags or state touched. */
    IEMOP_MNEMONIC(cld, "cld");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13207
13208
/**
 * @opcode 0xfd
 */
FNIEMOP_DEF(iemOp_std)
{
    /* STD - set the direction flag; no other flags or state touched. */
    IEMOP_MNEMONIC(std, "std");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13221
13222
/**
 * @opmaps grp4
 * @opcode /0
 */
FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
{
    /* INC Eb - register, memory and LOCKed memory forms via the shared body. */
    IEMOP_MNEMONIC(inc_Eb, "inc Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
}
13232
13233
/**
 * @opmaps grp4
 * @opcode /1
 */
FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
{
    /* DEC Eb - register, memory and LOCKed memory forms via the shared body. */
    IEMOP_MNEMONIC(dec_Eb, "dec Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
}
13243
13244
/**
 * @opcode 0xfe
 */
FNIEMOP_DEF(iemOp_Grp4)
{
    /* Group 4 dispatcher: only /0 (inc) and /1 (dec) are defined; everything
       else raises #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
        default:
            /** @todo is the eff-addr decoded? */
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}
13261
/** Opcode 0xff /0 - INC Ev.  The two body macros supply the unlocked and
 *  LOCKed paths respectively and must be used as a pair. */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_inc_u16, iemAImpl_inc_u32, iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
13269
13270
/** Opcode 0xff /1 - DEC Ev.  The two body macros supply the unlocked and
 *  LOCKed paths respectively and must be used as a pair. */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_dec_u16, iemAImpl_dec_u32, iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
13278
13279
/**
 * Opcode 0xff /2 - near indirect call.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. (Comment fixed; this
           is the ModR/M memory branch, cf. iemOp_Grp5_jmpn_Ev.) */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13366
/**
 * Shared body for the group 5 far indirect branches via a memory pointer
 * (0xff /3 callf Ep and 0xff /5 jmpf Ep).  Register operands are invalid
 * and raise \#UD.  In 64-bit mode the default operand size is 32-bit; only
 * Intel CPUs honour REX.W here (see the VIA todo below).  The selector word
 * is fetched after the offset, at offset 2/4/8 depending on operand size.
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint16_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint32_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint64_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE /* no gates */, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
13434
13435
/**
 * Opcode 0xff /3 - far indirect call through a memory far pointer.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf);
}
13445
13446
/**
 * Opcode 0xff /4 - near indirect jump.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13533
13534
/**
 * Opcode 0xff /5 - far indirect jump through a memory far pointer.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp);
}
13544
13545
/**
 * Opcode 0xff /6 - push Ev.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13601
13602
13603/**
13604 * @opcode 0xff
13605 */
13606FNIEMOP_DEF(iemOp_Grp5)
13607{
13608 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13609 switch (IEM_GET_MODRM_REG_8(bRm))
13610 {
13611 case 0:
13612 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
13613 case 1:
13614 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
13615 case 2:
13616 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
13617 case 3:
13618 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
13619 case 4:
13620 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
13621 case 5:
13622 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
13623 case 6:
13624 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
13625 case 7:
13626 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
13627 IEMOP_RAISE_INVALID_OPCODE_RET();
13628 }
13629 AssertFailedReturn(VERR_IEM_IPE_3);
13630}
13631
13632
13633
/**
 * The one-byte opcode dispatcher table, indexed by the opcode byte.
 *
 * Each entry is the FNIEMOP-style decode/emulation worker for that opcode.
 * Prefix bytes (segment overrides, operand/address size, lock/rep) are
 * themselves handled via workers listed here, and the 0x0f entry chains
 * to the two-byte opcode escape handler.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex3, iemOp_lds_Gv_Mp__vex2, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_in_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
13701
13702
13703/** @} */
13704
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette