VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 100835

Last change on this file since 100835 was 100835, checked in by vboxsync, 19 months ago

VMM/IEM: More conversion from IEM_MC_MEM_MAP to IEM_MC_MEM_MAP_XXX. bugref:10369

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 484.4 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 100835 2023-08-09 14:56:15Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
/**
 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
 * memory/register as the destination (read-write access).
 *
 * Emits the register-operand form and the unlocked memory form, then leaves
 * a dangling 'else {' open for the LOCK-prefix case.  It MUST therefore be
 * paired with IEMOP_BODY_BINARY_rm_r8_NO_LOCK or
 * IEMOP_BODY_BINARY_rm_r8_LOCKED, which close the braces.
 */
#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
            IEM_MC_ARG(uint8_t,    u8Src,   1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
118
/**
 * Body for instructions like TEST & CMP, ++ with a byte memory/registers as
 * operands (destination mapped read-only; only EFLAGS are written back).
 *
 * Like IEMOP_BODY_BINARY_rm_r8_RW this leaves a dangling 'else {' open and
 * must be paired with IEMOP_BODY_BINARY_rm_r8_NO_LOCK or
 * IEMOP_BODY_BINARY_rm_r8_LOCKED to close the braces.
 */
#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,  0); \
            IEM_MC_ARG(uint8_t,         u8Src,   1); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
178
/**
 * Closes IEMOP_BODY_BINARY_rm_r8_RW / _RO for instructions that do not
 * allow a LOCK prefix: the dangling 'else' (LOCK prefix present on a memory
 * operand) rejects the encoding via IEMOP_RAISE_INVALID_LOCK_PREFIX_RET.
 * The trailing braces close the blocks opened by the _RW/_RO macro.
 */
#define IEMOP_BODY_BINARY_rm_r8_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
185
/**
 * Closes IEMOP_BODY_BINARY_rm_r8_RW for instructions that allow a LOCK
 * prefix: emits the locked memory form (calling @a a_fnLockedU8) in the
 * dangling 'else' left open by the _RW macro, then closes the braces.
 */
#define IEMOP_BODY_BINARY_rm_r8_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
            IEM_MC_ARG(uint8_t,    u8Src,   1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bMapInfoDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bMapInfoDst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bMapInfoDst); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
208
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination (Gb, Eb encoding direction).
 *
 * Self-contained (no dangling braces): the destination is always a register,
 * so a LOCK prefix is never valid and the memory operand is only fetched.
 */
#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        IEM_MC_BEGIN(3, 1); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
257
258
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination (read-write access).
 *
 * Switches on the effective operand size (16/32/64-bit); in the 32-bit
 * register case the upper half of the 64-bit register is cleared per the
 * usual x86 rule.  Leaves a dangling 'else {' open for the LOCK-prefix
 * memory case; must be completed with IEMOP_BODY_BINARY_rm_rv_LOCKED.
 */
#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                /* 32-bit writes zero the upper 32 bits of the destination. */ \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t,   u16Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t,   u32Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t,   u64Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * Closes IEMOP_BODY_BINARY_rm_rv_RW: emits the LOCK-prefixed memory forms
 * (calling the @a a_fnLockedUxx workers) in the dangling 'else' left open by
 * the _RW macro, then closes the braces.
 */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t,   u16Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t,   u32Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t,   u64Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
477
/**
 * Body for read-only word/dword/qword instructions like TEST and CMP with
 * memory/register as the destination (destination mapped read-only; only
 * EFLAGS are written back).
 *
 * Self-contained, unlike the _RW variant: the LOCK prefix is always invalid
 * for these instructions, so the lock branch is handled inline here.
 */
#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                /* No high-dword clearing: the destination is not written. */ \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t,         u16Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t,         u32Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t,         u64Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* LOCK prefix is never valid for these read-only instructions. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
627
628
/**
 * Body for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * Note: deliberately no trailing semicolon after IEM_MC_END(); the caller
 * supplies it at the invocation site.
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    \
    IEM_MC_BEGIN(3, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint8_t *,       pu8Dst,           0); \
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm, 1); \
    IEM_MC_ARG(uint32_t *,      pEFlags,          2); \
    \
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
    \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
648
/**
 * Body for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate (sign-extended to 64 bits in
 * 64-bit mode, per IEM_OPCODE_GET_NEXT_S32_SX_U64).
 *
 * @param   a_fModifiesDstReg   Non-zero for instructions that write the
 *                              destination (controls the 32-bit high-dword
 *                              clearing); zero for TEST/CMP style bodies.
 *
 * NOTE(review): unlike IEMOP_BODY_BINARY_rm_rv_RW, the cases here end
 * without 'break' — presumably IEM_MC_ADVANCE_RIP_AND_FINISH()/IEM_MC_END()
 * leave the function, making fallthrough unreachable; confirm against the
 * IEM_MC_* definitions.
 */
#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *,      pu16Dst,            0); \
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm, 1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
            \
            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *,      pu32Dst,            0); \
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm, 1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
            \
            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
            \
            /* Only writing instructions clear the upper 32 bits of RAX. */ \
            if (a_fModifiesDstReg) \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint64_t *,      pu64Dst,            0); \
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm, 1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
            \
            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
715
716
717
718/* Instruction specification format - work in progress: */
719
/**
 * @opcode      0x00
 * @opmnemonic  add
 * @op1         rm:Eb
 * @op2         reg:Gb
 * @opmaps      one
 * @openc       ModR/M
 * @opflmodify  cf,pf,af,zf,sf,of
 * @ophints     harmless ignores_op_sizes
 * @opstats     add_Eb_Gb
 * @opgroup     og_gen_arith_bin
 * @optest              op1=1   op2=1   -> op1=2   efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl|=cf op1=1   op2=2   -> op1=3   efl&|=nc,po,na,nz,pl,nv
 * @optest              op1=254 op2=1   -> op1=255 efl&|=nc,po,na,nz,ng,nv
 * @optest              op1=128 op2=128 -> op1=0   efl&|=ov,pl,zf,na,po,cf
 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* The _RW body leaves a dangling 'else' that the _LOCKED body completes. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_add_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_add_u8_locked);
}
742
743
/**
 * @opcode      0x01
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @optest               op1=1  op2=1  -> op1=2  efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl|=cf  op1=2  op2=2  -> op1=4  efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl&~=cf op1=-1 op2=1  -> op1=0  efl&|=cf,po,af,zf,pl,nv
 * @optest               op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* The _RW body leaves a dangling 'else' that the _LOCKED body completes. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
759
760
/**
 * @opcode       0x02
 * @opgroup      og_gen_arith_bin
 * @opflmodify   cf,pf,af,zf,sf,of
 * @opcopytests  iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register destination, so no LOCK variant is needed. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
}
772
773
/**
 * @opcode       0x03
 * @opgroup      og_gen_arith_bin
 * @opflmodify   cf,pf,af,zf,sf,of
 * @opcopytests  iemOp_add_Ev_Gv
 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* Register destination; last argument flags that the destination is modified. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
785
786
/**
 * @opcode       0x04
 * @opgroup      og_gen_arith_bin
 * @opflmodify   cf,pf,af,zf,sf,of
 * @opcopytests  iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* The AL_Ib body macro omits its own trailing semicolon; supplied here. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
}
798
799
/**
 * @opcode      0x05
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @optest               op1=1  op2=1  -> op1=2  efl&|=nv,pl,nz,na,pe
 * @optest      efl|=cf  op1=2  op2=2  -> op1=4  efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl&~=cf op1=-1 op2=1  -> op1=0  efl&|=cf,po,af,zf,pl,nv
 * @optest               op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* Last argument = 1: ADD writes rAX, so 32-bit ops clear the high dword. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
814
815
/**
 * @opcode      0x06
 * @opgroup     og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();       /* PUSH ES is an invalid encoding in 64-bit mode. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
826
827
/**
 * @opcode      0x07
 * @opgroup     og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();       /* POP ES is an invalid encoding in 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Loading a segment register can change the execution mode, hence the C impl. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
839
840
/**
 * @opcode      0x08
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @optest                  op1=7    op2=12   -> op1=15   efl&|=nc,po,na,nz,pl,nv
 * @optest      efl|=of,cf  op1=0    op2=0    -> op1=0    efl&|=nc,po,na,zf,pl,nv
 * @optest                  op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 * @optest                  op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is architecturally undefined after OR. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_or_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_or_u8_locked);
}
859
860
/**
 * @opcode      0x09
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @optest      efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest      efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest      op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is architecturally undefined after OR. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
881
882
/**
 * @opcode       0x0a
 * @opgroup      og_gen_arith_bin
 * @opflmodify   cf,pf,af,zf,sf,of
 * @opflundef    af
 * @opflclear    of,cf
 * @opcopytests  iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is architecturally undefined after OR. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
}
897
898
/**
 * @opcode       0x0b
 * @opgroup      og_gen_arith_bin
 * @opflmodify   cf,pf,af,zf,sf,of
 * @opflundef    af
 * @opflclear    of,cf
 * @opcopytests  iemOp_or_Ev_Gv
 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is architecturally undefined after OR. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
913
914
/**
 * @opcode       0x0c
 * @opgroup      og_gen_arith_bin
 * @opflmodify   cf,pf,af,zf,sf,of
 * @opflundef    af
 * @opflclear    of,cf
 * @opcopytests  iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is architecturally undefined after OR. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
}
929
930
/**
 * @opcode      0x0d
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @optest      efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest      efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest      op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is architecturally undefined after OR. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
951
952
/**
 * @opcode      0x0e
 * @opgroup     og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();       /* PUSH CS is an invalid encoding in 64-bit mode. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
963
964
965/**
966 * @opcode 0x0f
967 * @opmnemonic EscTwo0f
968 * @openc two0f
969 * @opdisenum OP_2B_ESC
970 * @ophints harmless
971 * @opgroup og_escapes
972 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    /* 286 and later: 0x0f escapes into the two-byte opcode map (4 entries per
       opcode, selected by the current mandatory-prefix index). */
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
1008
1009/**
1010 * @opcode 0x10
1011 * @opgroup og_gen_arith_bin
1012 * @opfltest cf
1013 * @opflmodify cf,pf,af,zf,sf,of
1014 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1015 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1016 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1017 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1018 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1019 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* The _LOCKED body supplies the LOCK prefixed (memory destination) encoding. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_adc_u8_locked);
}
1026
1027
1028/**
1029 * @opcode 0x11
1030 * @opgroup og_gen_arith_bin
1031 * @opfltest cf
1032 * @opflmodify cf,pf,af,zf,sf,of
1033 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1034 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1035 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1036 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1037 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1038 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* The _LOCKED body supplies the LOCK prefixed (memory destination) encoding. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1045
1046
1047/**
1048 * @opcode 0x12
1049 * @opgroup og_gen_arith_bin
1050 * @opfltest cf
1051 * @opflmodify cf,pf,af,zf,sf,of
1052 * @opcopytests iemOp_adc_Eb_Gb
1053 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* Register destination form, hence no locked body. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
1059
1060
1061/**
1062 * @opcode 0x13
1063 * @opgroup og_gen_arith_bin
1064 * @opfltest cf
1065 * @opflmodify cf,pf,af,zf,sf,of
1066 * @opcopytests iemOp_adc_Ev_Gv
1067 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* Register destination form, hence no locked body. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1073
1074
1075/**
1076 * @opcode 0x14
1077 * @opgroup og_gen_arith_bin
1078 * @opfltest cf
1079 * @opflmodify cf,pf,af,zf,sf,of
1080 * @opcopytests iemOp_adc_Eb_Gb
1081 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* Fixed AL destination, byte immediate; operand size prefixes are irrelevant here. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
1087
1088
1089/**
1090 * @opcode 0x15
1091 * @opgroup og_gen_arith_bin
1092 * @opfltest cf
1093 * @opflmodify cf,pf,af,zf,sf,of
1094 * @opcopytests iemOp_adc_Ev_Gv
1095 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* rAX destination; the Iz immediate width follows the effective operand size. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1101
1102
1103/**
1104 * @opcode 0x16
1105 */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT(); /* invalid encoding in 64-bit mode */
    /* Shared segment-register push helper does the actual work. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1112
1113
1114/**
1115 * @opcode 0x17
1116 * @opgroup og_gen_arith_bin
1117 * @opfltest cf
1118 * @opflmodify cf,pf,af,zf,sf,of
1119 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* DISOPTYPE_INHIBIT_IRQS: popping into SS blocks interrupts for one instruction. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* invalid encoding in 64-bit mode */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1127
1128
1129/**
1130 * @opcode 0x18
1131 * @opgroup og_gen_arith_bin
1132 * @opfltest cf
1133 * @opflmodify cf,pf,af,zf,sf,of
1134 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* The _LOCKED body supplies the LOCK prefixed (memory destination) encoding. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sbb_u8_locked);
}
1141
1142
1143/**
1144 * @opcode 0x19
1145 * @opgroup og_gen_arith_bin
1146 * @opfltest cf
1147 * @opflmodify cf,pf,af,zf,sf,of
1148 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* The _LOCKED body supplies the LOCK prefixed (memory destination) encoding. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1155
1156
1157/**
1158 * @opcode 0x1a
1159 * @opgroup og_gen_arith_bin
1160 * @opfltest cf
1161 * @opflmodify cf,pf,af,zf,sf,of
1162 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* Register destination form, hence no locked body. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
1168
1169
1170/**
1171 * @opcode 0x1b
1172 * @opgroup og_gen_arith_bin
1173 * @opfltest cf
1174 * @opflmodify cf,pf,af,zf,sf,of
1175 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* Register destination form, hence no locked body. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1181
1182
1183/**
1184 * @opcode 0x1c
1185 * @opgroup og_gen_arith_bin
1186 * @opfltest cf
1187 * @opflmodify cf,pf,af,zf,sf,of
1188 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* Fixed AL destination, byte immediate; operand size prefixes are irrelevant here. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
1194
1195
1196/**
1197 * @opcode 0x1d
1198 * @opgroup og_gen_arith_bin
1199 * @opfltest cf
1200 * @opflmodify cf,pf,af,zf,sf,of
1201 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* rAX destination; the Iz immediate width follows the effective operand size. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1207
1208
1209/**
1210 * @opcode 0x1e
1211 * @opgroup og_stack_sreg
1212 */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* invalid encoding in 64-bit mode */
    /* Shared segment-register push helper does the actual work. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1219
1220
1221/**
1222 * @opcode 0x1f
1223 * @opgroup og_stack_sreg
1224 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* invalid encoding in 64-bit mode */
    /* Deferred to the common segment-register pop C implementation. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1232
1233
1234/**
1235 * @opcode 0x20
1236 * @opgroup og_gen_arith_bin
1237 * @opflmodify cf,pf,af,zf,sf,of
1238 * @opflundef af
1239 * @opflclear of,cf
1240 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND */
    /* The _LOCKED body supplies the LOCK prefixed (memory destination) encoding. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_and_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_and_u8_locked);
}
1248
1249
1250/**
1251 * @opcode 0x21
1252 * @opgroup og_gen_arith_bin
1253 * @opflmodify cf,pf,af,zf,sf,of
1254 * @opflundef af
1255 * @opflclear of,cf
1256 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND */
    /* The _LOCKED body supplies the LOCK prefixed (memory destination) encoding. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1264
1265
1266/**
1267 * @opcode 0x22
1268 * @opgroup og_gen_arith_bin
1269 * @opflmodify cf,pf,af,zf,sf,of
1270 * @opflundef af
1271 * @opflclear of,cf
1272 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* Register destination form, hence no locked body. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1279
1280
1281/**
1282 * @opcode 0x23
1283 * @opgroup og_gen_arith_bin
1284 * @opflmodify cf,pf,af,zf,sf,of
1285 * @opflundef af
1286 * @opflclear of,cf
1287 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* Register destination form, hence no locked body. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1294
1295
1296/**
1297 * @opcode 0x24
1298 * @opgroup og_gen_arith_bin
1299 * @opflmodify cf,pf,af,zf,sf,of
1300 * @opflundef af
1301 * @opflclear of,cf
1302 */
1303FNIEMOP_DEF(iemOp_and_Al_Ib)
1304{
1305 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1306 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1307 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1308}
1309
1310
1311/**
1312 * @opcode 0x25
1313 * @opgroup og_gen_arith_bin
1314 * @opflmodify cf,pf,af,zf,sf,of
1315 * @opflundef af
1316 * @opflclear of,cf
1317 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* rAX destination; the Iz immediate width follows the effective operand size. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1324
1325
1326/**
1327 * @opcode 0x26
1328 * @opmnemonic SEG
1329 * @op1 ES
1330 * @opgroup og_prefix
1331 * @openc prefix
1332 * @opdisenum OP_SEG
1333 * @ophints harmless
1334 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* Record the ES segment override and continue decoding with the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1344
1345
1346/**
1347 * @opcode 0x27
1348 * @opfltest af,cf
1349 * @opflmodify cf,pf,af,zf,sf,of
1350 * @opflundef of
1351 */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT(); /* invalid encoding in 64-bit mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after DAA */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_daa);
}
1360
1361
1362/**
1363 * @opcode 0x28
1364 * @opgroup og_gen_arith_bin
1365 * @opflmodify cf,pf,af,zf,sf,of
1366 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* The _LOCKED body supplies the LOCK prefixed (memory destination) encoding. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sub_u8_locked);
}
1373
1374
1375/**
1376 * @opcode 0x29
1377 * @opgroup og_gen_arith_bin
1378 * @opflmodify cf,pf,af,zf,sf,of
1379 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* The _LOCKED body supplies the LOCK prefixed (memory destination) encoding. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1386
1387
1388/**
1389 * @opcode 0x2a
1390 * @opgroup og_gen_arith_bin
1391 * @opflmodify cf,pf,af,zf,sf,of
1392 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* Register destination form, hence no locked body. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1398
1399
1400/**
1401 * @opcode 0x2b
1402 * @opgroup og_gen_arith_bin
1403 * @opflmodify cf,pf,af,zf,sf,of
1404 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* Register destination form, hence no locked body. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1410
1411
1412/**
1413 * @opcode 0x2c
1414 * @opgroup og_gen_arith_bin
1415 * @opflmodify cf,pf,af,zf,sf,of
1416 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* Fixed AL destination, byte immediate; operand size prefixes are irrelevant here. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1422
1423
1424/**
1425 * @opcode 0x2d
1426 * @opgroup og_gen_arith_bin
1427 * @opflmodify cf,pf,af,zf,sf,of
1428 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* rAX destination; the Iz immediate width follows the effective operand size. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1434
1435
1436/**
1437 * @opcode 0x2e
1438 * @opmnemonic SEG
1439 * @op1 CS
1440 * @opgroup og_prefix
1441 * @openc prefix
1442 * @opdisenum OP_SEG
1443 * @ophints harmless
1444 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* Record the CS segment override and continue decoding with the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1454
1455
1456/**
1457 * @opcode 0x2f
1458 * @opfltest af,cf
1459 * @opflmodify cf,pf,af,zf,sf,of
1460 * @opflundef of
1461 */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT(); /* invalid encoding in 64-bit mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after DAS */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_das);
}
1470
1471
1472/**
1473 * @opcode 0x30
1474 * @opgroup og_gen_arith_bin
1475 * @opflmodify cf,pf,af,zf,sf,of
1476 * @opflundef af
1477 * @opflclear of,cf
1478 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR */
    /* The _LOCKED body supplies the LOCK prefixed (memory destination) encoding. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_xor_u8_locked);
}
1486
1487
1488/**
1489 * @opcode 0x31
1490 * @opgroup og_gen_arith_bin
1491 * @opflmodify cf,pf,af,zf,sf,of
1492 * @opflundef af
1493 * @opflclear of,cf
1494 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR */
    /* The _LOCKED body supplies the LOCK prefixed (memory destination) encoding. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1502
1503
1504/**
1505 * @opcode 0x32
1506 * @opgroup og_gen_arith_bin
1507 * @opflmodify cf,pf,af,zf,sf,of
1508 * @opflundef af
1509 * @opflclear of,cf
1510 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* Register destination form, hence no locked body. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1517
1518
1519/**
1520 * @opcode 0x33
1521 * @opgroup og_gen_arith_bin
1522 * @opflmodify cf,pf,af,zf,sf,of
1523 * @opflundef af
1524 * @opflclear of,cf
1525 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* Register destination form, hence no locked body. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
}
1532
1533
1534/**
1535 * @opcode 0x34
1536 * @opgroup og_gen_arith_bin
1537 * @opflmodify cf,pf,af,zf,sf,of
1538 * @opflundef af
1539 * @opflclear of,cf
1540 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* Fixed AL destination, byte immediate; operand size prefixes are irrelevant here. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1547
1548
1549/**
1550 * @opcode 0x35
1551 * @opgroup og_gen_arith_bin
1552 * @opflmodify cf,pf,af,zf,sf,of
1553 * @opflundef af
1554 * @opflclear of,cf
1555 */
1556FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1557{
1558 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1559 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1560 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1561}
1562
1563
1564/**
1565 * @opcode 0x36
1566 * @opmnemonic SEG
1567 * @op1 SS
1568 * @opgroup og_prefix
1569 * @openc prefix
1570 * @opdisenum OP_SEG
1571 * @ophints harmless
1572 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* Record the SS segment override and continue decoding with the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1582
1583
1584/**
1585 * @opcode 0x37
1586 * @opfltest af,cf
1587 * @opflmodify cf,pf,af,zf,sf,of
1588 * @opflundef pf,zf,sf,of
1589 * @opgroup og_gen_arith_dec
1590 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1591 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1592 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1593 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1594 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1595 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1596 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1597 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1598 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1599 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1600 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1601 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1602 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1603 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1604 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1605 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1606 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1607 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1608 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1609 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1610 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1611 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1612 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1613 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1614 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1615 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1616 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1617 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1618 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1619 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1620 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1621 */
FNIEMOP_DEF(iemOp_aaa)
{
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT(); /* invalid encoding in 64-bit mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after AAA */

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aaa);
}
1631
1632
1633/**
1634 * @opcode 0x38
1635 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    /* CMP only reads its destination, hence the read-only body and no LOCK form. */
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
1642
1643
1644/**
1645 * @opcode 0x39
1646 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    /* CMP only reads its destination, hence the read-only body. */
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
1652
1653
1654/**
1655 * @opcode 0x3a
1656 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1662
1663
1664/**
1665 * @opcode 0x3b
1666 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    /* Final 0 (vs 1 for the writing forms): CMP does not modify the destination. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1672
1673
1674/**
1675 * @opcode 0x3c
1676 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1682
1683
1684/**
1685 * @opcode 0x3d
1686 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    /* Final 0 (vs 1 for the writing forms): CMP does not modify the destination. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1692
1693
1694/**
1695 * @opcode 0x3e
1696 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* Record the DS segment override and continue decoding with the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1706
1707
1708/**
1709 * @opcode 0x3f
1710 * @opfltest af,cf
1711 * @opflmodify cf,pf,af,zf,sf,of
1712 * @opflundef pf,zf,sf,of
1713 * @opgroup og_gen_arith_dec
1714 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1715 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1716 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1717 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1718 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1719 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1720 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1721 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1722 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1723 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1724 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1725 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1726 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1727 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1728 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1729 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1730 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1731 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1732 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1733 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1734 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1735 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1736 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1737 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1738 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1739 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1740 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1741 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1742 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1743 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1744 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1745 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1746 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1747 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1748 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1749 */
1750FNIEMOP_DEF(iemOp_aas)
1751{
1752 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1753 IEMOP_HLP_NO_64BIT();
1754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1755 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1756
1757 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aas);
1758}
1759
1760
1761/**
1762 * Common 'inc/dec register' helper.
1763 *
1764 * Not for 64-bit code, only for what became the rex prefixes.
1765 */
1766#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
1767 switch (pVCpu->iem.s.enmEffOpSize) \
1768 { \
1769 case IEMMODE_16BIT: \
1770 IEM_MC_BEGIN(2, 0); \
1771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1772 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
1773 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1774 IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
1775 IEM_MC_REF_EFLAGS(pEFlags); \
1776 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
1777 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1778 IEM_MC_END(); \
1779 break; \
1780 \
1781 case IEMMODE_32BIT: \
1782 IEM_MC_BEGIN(2, 0); \
1783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1784 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
1785 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1786 IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
1787 IEM_MC_REF_EFLAGS(pEFlags); \
1788 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
1789 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
1790 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1791 IEM_MC_END(); \
1792 break; \
1793 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1794 } \
1795 (void)0
1796
1797/**
1798 * @opcode 0x40
1799 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: the INC eAX instruction. */
    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1817
1818
1819/**
1820 * @opcode 0x41
1821 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: the INC eCX instruction. */
    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1840
1841
1842/**
1843 * @opcode 0x42
1844 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: the INC eDX instruction. */
    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1863
1864
1865
1866/**
1867 * @opcode 0x43
1868 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: the INC eBX instruction. */
    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1888
1889
1890/**
1891 * @opcode 0x44
1892 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: the INC eSP instruction. */
    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1911
1912
1913/**
1914 * @opcode 0x45
1915 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: the INC eBP instruction. */
    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1935
1936
1937/**
1938 * @opcode 0x46
1939 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: the INC eSI instruction. */
    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
1959
1960
1961/**
1962 * @opcode 0x47
1963 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: the INC eDI instruction. */
    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
1984
1985
1986/**
1987 * @opcode 0x48
1988 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: the DEC eAX instruction. */
    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
}
2007
2008
2009/**
2010 * @opcode 0x49
2011 */
2012FNIEMOP_DEF(iemOp_dec_eCX)
2013{
2014 /*
2015 * This is a REX prefix in 64-bit mode.
2016 */
2017 if (IEM_IS_64BIT_CODE(pVCpu))
2018 {
2019 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
2020 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2021 pVCpu->iem.s.uRexB = 1 << 3;
2022 iemRecalEffOpSize(pVCpu);
2023
2024 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2025 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2026 }
2027
2028 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
2029 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
2030}
2031
2032
2033/**
2034 * @opcode 0x4a
2035 */
2036FNIEMOP_DEF(iemOp_dec_eDX)
2037{
2038 /*
2039 * This is a REX prefix in 64-bit mode.
2040 */
2041 if (IEM_IS_64BIT_CODE(pVCpu))
2042 {
2043 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
2044 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2045 pVCpu->iem.s.uRexIndex = 1 << 3;
2046 iemRecalEffOpSize(pVCpu);
2047
2048 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2049 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2050 }
2051
2052 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
2053 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
2054}
2055
2056
2057/**
2058 * @opcode 0x4b
2059 */
2060FNIEMOP_DEF(iemOp_dec_eBX)
2061{
2062 /*
2063 * This is a REX prefix in 64-bit mode.
2064 */
2065 if (IEM_IS_64BIT_CODE(pVCpu))
2066 {
2067 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
2068 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2069 pVCpu->iem.s.uRexB = 1 << 3;
2070 pVCpu->iem.s.uRexIndex = 1 << 3;
2071 iemRecalEffOpSize(pVCpu);
2072
2073 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2074 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2075 }
2076
2077 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
2078 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
2079}
2080
2081
2082/**
2083 * @opcode 0x4c
2084 */
2085FNIEMOP_DEF(iemOp_dec_eSP)
2086{
2087 /*
2088 * This is a REX prefix in 64-bit mode.
2089 */
2090 if (IEM_IS_64BIT_CODE(pVCpu))
2091 {
2092 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
2093 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
2094 pVCpu->iem.s.uRexReg = 1 << 3;
2095 iemRecalEffOpSize(pVCpu);
2096
2097 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2098 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2099 }
2100
2101 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
2102 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
2103}
2104
2105
2106/**
2107 * @opcode 0x4d
2108 */
2109FNIEMOP_DEF(iemOp_dec_eBP)
2110{
2111 /*
2112 * This is a REX prefix in 64-bit mode.
2113 */
2114 if (IEM_IS_64BIT_CODE(pVCpu))
2115 {
2116 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
2117 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2118 pVCpu->iem.s.uRexReg = 1 << 3;
2119 pVCpu->iem.s.uRexB = 1 << 3;
2120 iemRecalEffOpSize(pVCpu);
2121
2122 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2123 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2124 }
2125
2126 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
2127 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
2128}
2129
2130
2131/**
2132 * @opcode 0x4e
2133 */
2134FNIEMOP_DEF(iemOp_dec_eSI)
2135{
2136 /*
2137 * This is a REX prefix in 64-bit mode.
2138 */
2139 if (IEM_IS_64BIT_CODE(pVCpu))
2140 {
2141 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
2142 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2143 pVCpu->iem.s.uRexReg = 1 << 3;
2144 pVCpu->iem.s.uRexIndex = 1 << 3;
2145 iemRecalEffOpSize(pVCpu);
2146
2147 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2148 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2149 }
2150
2151 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
2152 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
2153}
2154
2155
2156/**
2157 * @opcode 0x4f
2158 */
2159FNIEMOP_DEF(iemOp_dec_eDI)
2160{
2161 /*
2162 * This is a REX prefix in 64-bit mode.
2163 */
2164 if (IEM_IS_64BIT_CODE(pVCpu))
2165 {
2166 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
2167 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2168 pVCpu->iem.s.uRexReg = 1 << 3;
2169 pVCpu->iem.s.uRexB = 1 << 3;
2170 pVCpu->iem.s.uRexIndex = 1 << 3;
2171 iemRecalEffOpSize(pVCpu);
2172
2173 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2174 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2175 }
2176
2177 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
2178 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
2179}
2180
2181
2182/**
2183 * Common 'push register' helper.
2184 */
2185FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
2186{
2187 if (IEM_IS_64BIT_CODE(pVCpu))
2188 {
2189 iReg |= pVCpu->iem.s.uRexB;
2190 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2191 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2192 }
2193
2194 switch (pVCpu->iem.s.enmEffOpSize)
2195 {
2196 case IEMMODE_16BIT:
2197 IEM_MC_BEGIN(0, 1);
2198 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2199 IEM_MC_LOCAL(uint16_t, u16Value);
2200 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
2201 IEM_MC_PUSH_U16(u16Value);
2202 IEM_MC_ADVANCE_RIP_AND_FINISH();
2203 IEM_MC_END();
2204 break;
2205
2206 case IEMMODE_32BIT:
2207 IEM_MC_BEGIN(0, 1);
2208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2209 IEM_MC_LOCAL(uint32_t, u32Value);
2210 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
2211 IEM_MC_PUSH_U32(u32Value);
2212 IEM_MC_ADVANCE_RIP_AND_FINISH();
2213 IEM_MC_END();
2214 break;
2215
2216 case IEMMODE_64BIT:
2217 IEM_MC_BEGIN(0, 1);
2218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2219 IEM_MC_LOCAL(uint64_t, u64Value);
2220 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
2221 IEM_MC_PUSH_U64(u64Value);
2222 IEM_MC_ADVANCE_RIP_AND_FINISH();
2223 IEM_MC_END();
2224 break;
2225
2226 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2227 }
2228}
2229
2230
2231/**
2232 * @opcode 0x50
2233 */
2234FNIEMOP_DEF(iemOp_push_eAX)
2235{
2236 IEMOP_MNEMONIC(push_rAX, "push rAX");
2237 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
2238}
2239
2240
2241/**
2242 * @opcode 0x51
2243 */
2244FNIEMOP_DEF(iemOp_push_eCX)
2245{
2246 IEMOP_MNEMONIC(push_rCX, "push rCX");
2247 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
2248}
2249
2250
2251/**
2252 * @opcode 0x52
2253 */
2254FNIEMOP_DEF(iemOp_push_eDX)
2255{
2256 IEMOP_MNEMONIC(push_rDX, "push rDX");
2257 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
2258}
2259
2260
2261/**
2262 * @opcode 0x53
2263 */
2264FNIEMOP_DEF(iemOp_push_eBX)
2265{
2266 IEMOP_MNEMONIC(push_rBX, "push rBX");
2267 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
2268}
2269
2270
2271/**
2272 * @opcode 0x54
2273 */
2274FNIEMOP_DEF(iemOp_push_eSP)
2275{
2276 IEMOP_MNEMONIC(push_rSP, "push rSP");
2277 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
2278 {
2279 IEM_MC_BEGIN(0, 1);
2280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2281 IEM_MC_LOCAL(uint16_t, u16Value);
2282 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
2283 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
2284 IEM_MC_PUSH_U16(u16Value);
2285 IEM_MC_ADVANCE_RIP_AND_FINISH();
2286 IEM_MC_END();
2287 }
2288 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
2289}
2290
2291
2292/**
2293 * @opcode 0x55
2294 */
2295FNIEMOP_DEF(iemOp_push_eBP)
2296{
2297 IEMOP_MNEMONIC(push_rBP, "push rBP");
2298 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
2299}
2300
2301
2302/**
2303 * @opcode 0x56
2304 */
2305FNIEMOP_DEF(iemOp_push_eSI)
2306{
2307 IEMOP_MNEMONIC(push_rSI, "push rSI");
2308 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
2309}
2310
2311
2312/**
2313 * @opcode 0x57
2314 */
2315FNIEMOP_DEF(iemOp_push_eDI)
2316{
2317 IEMOP_MNEMONIC(push_rDI, "push rDI");
2318 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
2319}
2320
2321
2322/**
2323 * Common 'pop register' helper.
2324 */
2325FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
2326{
2327 if (IEM_IS_64BIT_CODE(pVCpu))
2328 {
2329 iReg |= pVCpu->iem.s.uRexB;
2330 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2331 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2332 }
2333
2334 switch (pVCpu->iem.s.enmEffOpSize)
2335 {
2336 case IEMMODE_16BIT:
2337 IEM_MC_BEGIN(0, 1);
2338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2339 IEM_MC_LOCAL(uint16_t *, pu16Dst);
2340 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
2341 IEM_MC_POP_U16(pu16Dst);
2342 IEM_MC_ADVANCE_RIP_AND_FINISH();
2343 IEM_MC_END();
2344 break;
2345
2346 case IEMMODE_32BIT:
2347 IEM_MC_BEGIN(0, 1);
2348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2349 IEM_MC_LOCAL(uint32_t *, pu32Dst);
2350 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
2351 IEM_MC_POP_U32(pu32Dst);
2352 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
2353 IEM_MC_ADVANCE_RIP_AND_FINISH();
2354 IEM_MC_END();
2355 break;
2356
2357 case IEMMODE_64BIT:
2358 IEM_MC_BEGIN(0, 1);
2359 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2360 IEM_MC_LOCAL(uint64_t *, pu64Dst);
2361 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
2362 IEM_MC_POP_U64(pu64Dst);
2363 IEM_MC_ADVANCE_RIP_AND_FINISH();
2364 IEM_MC_END();
2365 break;
2366
2367 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2368 }
2369}
2370
2371
2372/**
2373 * @opcode 0x58
2374 */
2375FNIEMOP_DEF(iemOp_pop_eAX)
2376{
2377 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
2378 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
2379}
2380
2381
2382/**
2383 * @opcode 0x59
2384 */
2385FNIEMOP_DEF(iemOp_pop_eCX)
2386{
2387 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
2388 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
2389}
2390
2391
2392/**
2393 * @opcode 0x5a
2394 */
2395FNIEMOP_DEF(iemOp_pop_eDX)
2396{
2397 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
2398 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
2399}
2400
2401
2402/**
2403 * @opcode 0x5b
2404 */
2405FNIEMOP_DEF(iemOp_pop_eBX)
2406{
2407 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
2408 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
2409}
2410
2411
2412/**
2413 * @opcode 0x5c
2414 */
2415FNIEMOP_DEF(iemOp_pop_eSP)
2416{
2417 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
2418 if (IEM_IS_64BIT_CODE(pVCpu))
2419 {
2420 if (pVCpu->iem.s.uRexB)
2421 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
2422 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2423 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2424 }
2425
2426 /** @todo add testcase for this instruction. */
2427 switch (pVCpu->iem.s.enmEffOpSize)
2428 {
2429 case IEMMODE_16BIT:
2430 IEM_MC_BEGIN(0, 1);
2431 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2432 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2433 IEM_MC_LOCAL(uint16_t, u16Dst);
2434 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
2435 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
2436 IEM_MC_ADVANCE_RIP_AND_FINISH();
2437 IEM_MC_END();
2438 break;
2439
2440 case IEMMODE_32BIT:
2441 IEM_MC_BEGIN(0, 1);
2442 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2443 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2444 IEM_MC_LOCAL(uint32_t, u32Dst);
2445 IEM_MC_POP_U32(&u32Dst);
2446 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
2447 IEM_MC_ADVANCE_RIP_AND_FINISH();
2448 IEM_MC_END();
2449 break;
2450
2451 case IEMMODE_64BIT:
2452 IEM_MC_BEGIN(0, 1);
2453 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2454 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2455 IEM_MC_LOCAL(uint64_t, u64Dst);
2456 IEM_MC_POP_U64(&u64Dst);
2457 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
2458 IEM_MC_ADVANCE_RIP_AND_FINISH();
2459 IEM_MC_END();
2460 break;
2461
2462 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2463 }
2464}
2465
2466
2467/**
2468 * @opcode 0x5d
2469 */
2470FNIEMOP_DEF(iemOp_pop_eBP)
2471{
2472 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
2473 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
2474}
2475
2476
2477/**
2478 * @opcode 0x5e
2479 */
2480FNIEMOP_DEF(iemOp_pop_eSI)
2481{
2482 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
2483 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
2484}
2485
2486
2487/**
2488 * @opcode 0x5f
2489 */
2490FNIEMOP_DEF(iemOp_pop_eDI)
2491{
2492 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
2493 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
2494}
2495
2496
2497/**
2498 * @opcode 0x60
2499 */
2500FNIEMOP_DEF(iemOp_pusha)
2501{
2502 IEMOP_MNEMONIC(pusha, "pusha");
2503 IEMOP_HLP_MIN_186();
2504 IEMOP_HLP_NO_64BIT();
2505 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2506 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_16);
2507 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2508 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_32);
2509}
2510
2511
2512/**
2513 * @opcode 0x61
2514 */
2515FNIEMOP_DEF(iemOp_popa__mvex)
2516{
2517 if (!IEM_IS_64BIT_CODE(pVCpu))
2518 {
2519 IEMOP_MNEMONIC(popa, "popa");
2520 IEMOP_HLP_MIN_186();
2521 IEMOP_HLP_NO_64BIT();
2522 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2523 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_popa_16);
2524 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2525 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_popa_32);
2526 }
2527 IEMOP_MNEMONIC(mvex, "mvex");
2528 Log(("mvex prefix is not supported!\n"));
2529 IEMOP_RAISE_INVALID_OPCODE_RET();
2530}
2531
2532
2533/**
2534 * @opcode 0x62
2535 * @opmnemonic bound
2536 * @op1 Gv_RO
2537 * @op2 Ma
2538 * @opmincpu 80186
2539 * @ophints harmless x86_invalid_64
2540 * @optest op1=0 op2=0 ->
2541 * @optest op1=1 op2=0 -> value.xcpt=5
2542 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2543 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2544 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2545 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2546 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2547 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2548 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2549 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2550 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2551 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2552 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2553 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2554 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2555 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2556 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2557 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2558 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2559 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2560 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2561 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2562 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2563 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2564 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2565 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2566 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2567 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2568 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2569 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2570 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2571 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2572 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2573 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2574 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2575 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2576 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2577 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2578 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2579 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2580 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2581 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2582 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2583 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2584 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set.  In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X.  Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /* MOD != 3: this really is BOUND.  The memory operand holds the
               lower and upper bounds back to back. */
            /** @todo testcase: check that there are two memory accesses involved.  Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t,    u16Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t,    u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t,    u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t,    u32Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t,    u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t,    u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /* 64-bit mode: 0x62 can only be the EVEX prefix. */
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX decoding: consume payload bytes 2 and 3, then give up (not implemented). */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2672
2673
/** Opcode 0x63 - non-64-bit modes: ARPL (adjust RPL field of selector).
 *  286+, protected mode only; in 64-bit mode 0x63 is MOVSXD instead. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination.  ARPL is 16-bit only, hence the _8 register
           accessors (no REX in the modes where ARPL exists). */
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
        IEM_MC_ARG(uint16_t,        u16Src,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: read-modify-write mapping of the word. */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint16_t *,  pu16Dst,          0);
        IEM_MC_ARG(uint16_t,    u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,  2);
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,   bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2722
2723
2724/**
2725 * @opcode 0x63
2726 *
2727 * @note This is a weird one. It works like a regular move instruction if
2728 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2729 * @todo This definitely needs a testcase to verify the odd cases. */
2730FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
2731{
2732 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */
2733
2734 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
2735 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2736
2737 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2738 {
2739 if (IEM_IS_MODRM_REG_MODE(bRm))
2740 {
2741 /*
2742 * Register to register.
2743 */
2744 IEM_MC_BEGIN(0, 1);
2745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2746 IEM_MC_LOCAL(uint64_t, u64Value);
2747 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
2748 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2749 IEM_MC_ADVANCE_RIP_AND_FINISH();
2750 IEM_MC_END();
2751 }
2752 else
2753 {
2754 /*
2755 * We're loading a register from memory.
2756 */
2757 IEM_MC_BEGIN(0, 2);
2758 IEM_MC_LOCAL(uint64_t, u64Value);
2759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2760 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2762 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2763 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2764 IEM_MC_ADVANCE_RIP_AND_FINISH();
2765 IEM_MC_END();
2766 }
2767 }
2768 else
2769 AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
2770}
2771
2772
2773/**
2774 * @opcode 0x64
2775 * @opmnemonic segfs
2776 * @opmincpu 80386
2777 * @opgroup og_prefixes
2778 */
2779FNIEMOP_DEF(iemOp_seg_FS)
2780{
2781 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
2782 IEMOP_HLP_MIN_386();
2783
2784 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
2785 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
2786
2787 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2788 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2789}
2790
2791
2792/**
2793 * @opcode 0x65
2794 * @opmnemonic seggs
2795 * @opmincpu 80386
2796 * @opgroup og_prefixes
2797 */
2798FNIEMOP_DEF(iemOp_seg_GS)
2799{
2800 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
2801 IEMOP_HLP_MIN_386();
2802
2803 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
2804 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
2805
2806 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2807 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2808}
2809
2810
2811/**
2812 * @opcode 0x66
2813 * @opmnemonic opsize
2814 * @openc prefix
2815 * @opmincpu 80386
2816 * @ophints harmless
2817 * @opgroup og_prefixes
2818 */
2819FNIEMOP_DEF(iemOp_op_size)
2820{
2821 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
2822 IEMOP_HLP_MIN_386();
2823
2824 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
2825 iemRecalEffOpSize(pVCpu);
2826
2827 /* For the 4 entry opcode tables, the operand prefix doesn't not count
2828 when REPZ or REPNZ are present. */
2829 if (pVCpu->iem.s.idxPrefix == 0)
2830 pVCpu->iem.s.idxPrefix = 1;
2831
2832 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2833 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2834}
2835
2836
2837/**
2838 * @opcode 0x67
2839 * @opmnemonic addrsize
2840 * @openc prefix
2841 * @opmincpu 80386
2842 * @ophints harmless
2843 * @opgroup og_prefixes
2844 */
2845FNIEMOP_DEF(iemOp_addr_size)
2846{
2847 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
2848 IEMOP_HLP_MIN_386();
2849
2850 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
2851 switch (pVCpu->iem.s.enmDefAddrMode)
2852 {
2853 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2854 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
2855 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2856 default: AssertFailed();
2857 }
2858
2859 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2860 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2861}
2862
2863
2864/**
2865 * @opcode 0x68
2866 */
2867FNIEMOP_DEF(iemOp_push_Iz)
2868{
2869 IEMOP_MNEMONIC(push_Iz, "push Iz");
2870 IEMOP_HLP_MIN_186();
2871 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2872 switch (pVCpu->iem.s.enmEffOpSize)
2873 {
2874 case IEMMODE_16BIT:
2875 {
2876 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2877 IEM_MC_BEGIN(0,0);
2878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2879 IEM_MC_PUSH_U16(u16Imm);
2880 IEM_MC_ADVANCE_RIP_AND_FINISH();
2881 IEM_MC_END();
2882 break;
2883 }
2884
2885 case IEMMODE_32BIT:
2886 {
2887 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2888 IEM_MC_BEGIN(0,0);
2889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2890 IEM_MC_PUSH_U32(u32Imm);
2891 IEM_MC_ADVANCE_RIP_AND_FINISH();
2892 IEM_MC_END();
2893 break;
2894 }
2895
2896 case IEMMODE_64BIT:
2897 {
2898 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2899 IEM_MC_BEGIN(0,0);
2900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2901 IEM_MC_PUSH_U64(u64Imm);
2902 IEM_MC_ADVANCE_RIP_AND_FINISH();
2903 IEM_MC_END();
2904 break;
2905 }
2906
2907 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2908 }
2909}
2910
2911
2912/**
2913 * @opcode 0x69
2914 */
2915FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
2916{
2917 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
2918 IEMOP_HLP_MIN_186();
2919 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2920 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2921
2922 switch (pVCpu->iem.s.enmEffOpSize)
2923 {
2924 case IEMMODE_16BIT:
2925 {
2926 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
2927 if (IEM_IS_MODRM_REG_MODE(bRm))
2928 {
2929 /* register operand */
2930 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2931 IEM_MC_BEGIN(3, 1);
2932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2933 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2934 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
2935 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2936 IEM_MC_LOCAL(uint16_t, u16Tmp);
2937
2938 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2939 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2940 IEM_MC_REF_EFLAGS(pEFlags);
2941 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2942 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2943
2944 IEM_MC_ADVANCE_RIP_AND_FINISH();
2945 IEM_MC_END();
2946 }
2947 else
2948 {
2949 /* memory operand */
2950 IEM_MC_BEGIN(3, 2);
2951 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2952 IEM_MC_ARG(uint16_t, u16Src, 1);
2953 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2954 IEM_MC_LOCAL(uint16_t, u16Tmp);
2955 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2956
2957 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2958 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2959 IEM_MC_ASSIGN(u16Src, u16Imm);
2960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2961 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2962 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2963 IEM_MC_REF_EFLAGS(pEFlags);
2964 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2965 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2966
2967 IEM_MC_ADVANCE_RIP_AND_FINISH();
2968 IEM_MC_END();
2969 }
2970 break;
2971 }
2972
2973 case IEMMODE_32BIT:
2974 {
2975 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
2976 if (IEM_IS_MODRM_REG_MODE(bRm))
2977 {
2978 /* register operand */
2979 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2980 IEM_MC_BEGIN(3, 1);
2981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2982 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2983 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
2984 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2985 IEM_MC_LOCAL(uint32_t, u32Tmp);
2986
2987 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2988 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2989 IEM_MC_REF_EFLAGS(pEFlags);
2990 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
2991 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2992
2993 IEM_MC_ADVANCE_RIP_AND_FINISH();
2994 IEM_MC_END();
2995 }
2996 else
2997 {
2998 /* memory operand */
2999 IEM_MC_BEGIN(3, 2);
3000 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3001 IEM_MC_ARG(uint32_t, u32Src, 1);
3002 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3003 IEM_MC_LOCAL(uint32_t, u32Tmp);
3004 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3005
3006 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3007 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3008 IEM_MC_ASSIGN(u32Src, u32Imm);
3009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3010 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3011 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
3012 IEM_MC_REF_EFLAGS(pEFlags);
3013 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3014 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3015
3016 IEM_MC_ADVANCE_RIP_AND_FINISH();
3017 IEM_MC_END();
3018 }
3019 break;
3020 }
3021
3022 case IEMMODE_64BIT:
3023 {
3024 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3025 if (IEM_IS_MODRM_REG_MODE(bRm))
3026 {
3027 /* register operand */
3028 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3029 IEM_MC_BEGIN(3, 1);
3030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3031 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3032 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
3033 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3034 IEM_MC_LOCAL(uint64_t, u64Tmp);
3035
3036 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3037 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3038 IEM_MC_REF_EFLAGS(pEFlags);
3039 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3040 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3041
3042 IEM_MC_ADVANCE_RIP_AND_FINISH();
3043 IEM_MC_END();
3044 }
3045 else
3046 {
3047 /* memory operand */
3048 IEM_MC_BEGIN(3, 2);
3049 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3050 IEM_MC_ARG(uint64_t, u64Src, 1);
3051 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3052 IEM_MC_LOCAL(uint64_t, u64Tmp);
3053 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3054
3055 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3056 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
3057 IEM_MC_ASSIGN_U32_SX_U64(u64Src, u32Imm); /* parameter count for the threaded function for this block. */
3058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3059 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3060 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3061 IEM_MC_REF_EFLAGS(pEFlags);
3062 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3063 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3064
3065 IEM_MC_ADVANCE_RIP_AND_FINISH();
3066 IEM_MC_END();
3067 }
3068 break;
3069 }
3070
3071 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3072 }
3073}
3074
3075
3076/**
3077 * @opcode 0x6a
3078 */
3079FNIEMOP_DEF(iemOp_push_Ib)
3080{
3081 IEMOP_MNEMONIC(push_Ib, "push Ib");
3082 IEMOP_HLP_MIN_186();
3083 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3084 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3085
3086 switch (pVCpu->iem.s.enmEffOpSize)
3087 {
3088 case IEMMODE_16BIT:
3089 IEM_MC_BEGIN(0,0);
3090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3091 IEM_MC_PUSH_U16(i8Imm);
3092 IEM_MC_ADVANCE_RIP_AND_FINISH();
3093 IEM_MC_END();
3094 break;
3095 case IEMMODE_32BIT:
3096 IEM_MC_BEGIN(0,0);
3097 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3098 IEM_MC_PUSH_U32(i8Imm);
3099 IEM_MC_ADVANCE_RIP_AND_FINISH();
3100 IEM_MC_END();
3101 break;
3102 case IEMMODE_64BIT:
3103 IEM_MC_BEGIN(0,0);
3104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3105 IEM_MC_PUSH_U64(i8Imm);
3106 IEM_MC_ADVANCE_RIP_AND_FINISH();
3107 IEM_MC_END();
3108 break;
3109 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3110 }
3111}
3112
3113
3114/**
3115 * @opcode 0x6b
3116 */
3117FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
3118{
3119 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
3120 IEMOP_HLP_MIN_186();
3121 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3122 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3123
3124 switch (pVCpu->iem.s.enmEffOpSize)
3125 {
3126 case IEMMODE_16BIT:
3127 {
3128 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
3129 if (IEM_IS_MODRM_REG_MODE(bRm))
3130 {
3131 /* register operand */
3132 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3133 IEM_MC_BEGIN(3, 1);
3134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3135 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3136 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
3137 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3138 IEM_MC_LOCAL(uint16_t, u16Tmp);
3139
3140 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3141 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
3142 IEM_MC_REF_EFLAGS(pEFlags);
3143 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3144 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3145
3146 IEM_MC_ADVANCE_RIP_AND_FINISH();
3147 IEM_MC_END();
3148 }
3149 else
3150 {
3151 /* memory operand */
3152 IEM_MC_BEGIN(3, 2);
3153 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3154 IEM_MC_ARG(uint16_t, u16Src, 1);
3155 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3156 IEM_MC_LOCAL(uint16_t, u16Tmp);
3157 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3158
3159 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3160 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
3161 IEM_MC_ASSIGN(u16Src, u16Imm);
3162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3163 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3164 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
3165 IEM_MC_REF_EFLAGS(pEFlags);
3166 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3167 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3168
3169 IEM_MC_ADVANCE_RIP_AND_FINISH();
3170 IEM_MC_END();
3171 }
3172 break;
3173 }
3174
3175 case IEMMODE_32BIT:
3176 {
3177 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
3178 if (IEM_IS_MODRM_REG_MODE(bRm))
3179 {
3180 /* register operand */
3181 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3182 IEM_MC_BEGIN(3, 1);
3183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3184 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3185 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
3186 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3187 IEM_MC_LOCAL(uint32_t, u32Tmp);
3188
3189 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3190 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
3191 IEM_MC_REF_EFLAGS(pEFlags);
3192 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3193 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3194
3195 IEM_MC_ADVANCE_RIP_AND_FINISH();
3196 IEM_MC_END();
3197 }
3198 else
3199 {
3200 /* memory operand */
3201 IEM_MC_BEGIN(3, 2);
3202 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3203 IEM_MC_ARG(uint32_t, u32Src, 1);
3204 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3205 IEM_MC_LOCAL(uint32_t, u32Tmp);
3206 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3207
3208 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3209 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
3210 IEM_MC_ASSIGN(u32Src, u32Imm);
3211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3212 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3213 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
3214 IEM_MC_REF_EFLAGS(pEFlags);
3215 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3216 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3217
3218 IEM_MC_ADVANCE_RIP_AND_FINISH();
3219 IEM_MC_END();
3220 }
3221 break;
3222 }
3223
3224 case IEMMODE_64BIT:
3225 {
3226 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3227 if (IEM_IS_MODRM_REG_MODE(bRm))
3228 {
3229 /* register operand */
3230 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3231 IEM_MC_BEGIN(3, 1);
3232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3233 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3234 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
3235 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3236 IEM_MC_LOCAL(uint64_t, u64Tmp);
3237
3238 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3239 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3240 IEM_MC_REF_EFLAGS(pEFlags);
3241 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3242 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3243
3244 IEM_MC_ADVANCE_RIP_AND_FINISH();
3245 IEM_MC_END();
3246 }
3247 else
3248 {
3249 /* memory operand */
3250 IEM_MC_BEGIN(3, 2);
3251 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3252 IEM_MC_ARG(uint64_t, u64Src, 1);
3253 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3254 IEM_MC_LOCAL(uint64_t, u64Tmp);
3255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3256
3257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3258 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the */
3259 IEM_MC_ASSIGN_U8_SX_U64(u64Src, u8Imm); /* parameter count for the threaded function for this block. */
3260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3261 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3262 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3263 IEM_MC_REF_EFLAGS(pEFlags);
3264 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3265 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3266
3267 IEM_MC_ADVANCE_RIP_AND_FINISH();
3268 IEM_MC_END();
3269 }
3270 break;
3271 }
3272
3273 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3274 }
3275}
3276
3277
3278/**
3279 * @opcode 0x6c
3280 */
3281FNIEMOP_DEF(iemOp_insb_Yb_DX)
3282{
3283 IEMOP_HLP_MIN_186();
3284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3285 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3286 {
3287 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
3288 switch (pVCpu->iem.s.enmEffAddrMode)
3289 {
3290 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3291 iemCImpl_rep_ins_op8_addr16, false);
3292 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3293 iemCImpl_rep_ins_op8_addr32, false);
3294 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3295 iemCImpl_rep_ins_op8_addr64, false);
3296 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3297 }
3298 }
3299 else
3300 {
3301 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
3302 switch (pVCpu->iem.s.enmEffAddrMode)
3303 {
3304 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3305 iemCImpl_ins_op8_addr16, false);
3306 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3307 iemCImpl_ins_op8_addr32, false);
3308 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3309 iemCImpl_ins_op8_addr64, false);
3310 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3311 }
3312 }
3313}
3314
3315
3316/**
3317 * @opcode 0x6d
3318 */
3319FNIEMOP_DEF(iemOp_inswd_Yv_DX)
3320{
3321 IEMOP_HLP_MIN_186();
3322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3323 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3324 {
3325 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
3326 switch (pVCpu->iem.s.enmEffOpSize)
3327 {
3328 case IEMMODE_16BIT:
3329 switch (pVCpu->iem.s.enmEffAddrMode)
3330 {
3331 case IEMMODE_16BIT:
3332 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3333 iemCImpl_rep_ins_op16_addr16, false);
3334 case IEMMODE_32BIT:
3335 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3336 iemCImpl_rep_ins_op16_addr32, false);
3337 case IEMMODE_64BIT:
3338 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3339 iemCImpl_rep_ins_op16_addr64, false);
3340 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3341 }
3342 break;
3343 case IEMMODE_64BIT:
3344 case IEMMODE_32BIT:
3345 switch (pVCpu->iem.s.enmEffAddrMode)
3346 {
3347 case IEMMODE_16BIT:
3348 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3349 iemCImpl_rep_ins_op32_addr16, false);
3350 case IEMMODE_32BIT:
3351 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3352 iemCImpl_rep_ins_op32_addr32, false);
3353 case IEMMODE_64BIT:
3354 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3355 iemCImpl_rep_ins_op32_addr64, false);
3356 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3357 }
3358 break;
3359 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3360 }
3361 }
3362 else
3363 {
3364 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
3365 switch (pVCpu->iem.s.enmEffOpSize)
3366 {
3367 case IEMMODE_16BIT:
3368 switch (pVCpu->iem.s.enmEffAddrMode)
3369 {
3370 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3371 iemCImpl_ins_op16_addr16, false);
3372 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3373 iemCImpl_ins_op16_addr32, false);
3374 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3375 iemCImpl_ins_op16_addr64, false);
3376 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3377 }
3378 break;
3379 case IEMMODE_64BIT:
3380 case IEMMODE_32BIT:
3381 switch (pVCpu->iem.s.enmEffAddrMode)
3382 {
3383 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3384 iemCImpl_ins_op32_addr16, false);
3385 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3386 iemCImpl_ins_op32_addr32, false);
3387 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3388 iemCImpl_ins_op32_addr64, false);
3389 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3390 }
3391 break;
3392 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3393 }
3394 }
3395}
3396
3397
3398/**
3399 * @opcode 0x6e
3400 */
3401FNIEMOP_DEF(iemOp_outsb_Yb_DX)
3402{
3403 IEMOP_HLP_MIN_186();
3404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3405 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3406 {
3407 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
3408 switch (pVCpu->iem.s.enmEffAddrMode)
3409 {
3410 case IEMMODE_16BIT:
3411 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3412 iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3413 case IEMMODE_32BIT:
3414 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3415 iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3416 case IEMMODE_64BIT:
3417 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3418 iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3419 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3420 }
3421 }
3422 else
3423 {
3424 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
3425 switch (pVCpu->iem.s.enmEffAddrMode)
3426 {
3427 case IEMMODE_16BIT:
3428 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3429 iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3430 case IEMMODE_32BIT:
3431 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3432 iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3433 case IEMMODE_64BIT:
3434 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3435 iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3436 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3437 }
3438 }
3439}
3440
3441
3442/**
3443 * @opcode 0x6f
3444 */
3445FNIEMOP_DEF(iemOp_outswd_Yv_DX)
3446{
3447 IEMOP_HLP_MIN_186();
3448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3449 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3450 {
3451 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
3452 switch (pVCpu->iem.s.enmEffOpSize)
3453 {
3454 case IEMMODE_16BIT:
3455 switch (pVCpu->iem.s.enmEffAddrMode)
3456 {
3457 case IEMMODE_16BIT:
3458 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3459 iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3460 case IEMMODE_32BIT:
3461 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3462 iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3463 case IEMMODE_64BIT:
3464 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3465 iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3466 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3467 }
3468 break;
3469 case IEMMODE_64BIT:
3470 case IEMMODE_32BIT:
3471 switch (pVCpu->iem.s.enmEffAddrMode)
3472 {
3473 case IEMMODE_16BIT:
3474 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3475 iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3476 case IEMMODE_32BIT:
3477 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3478 iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3479 case IEMMODE_64BIT:
3480 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3481 iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3482 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3483 }
3484 break;
3485 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3486 }
3487 }
3488 else
3489 {
3490 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
3491 switch (pVCpu->iem.s.enmEffOpSize)
3492 {
3493 case IEMMODE_16BIT:
3494 switch (pVCpu->iem.s.enmEffAddrMode)
3495 {
3496 case IEMMODE_16BIT:
3497 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3498 iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3499 case IEMMODE_32BIT:
3500 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3501 iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3502 case IEMMODE_64BIT:
3503 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3504 iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3505 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3506 }
3507 break;
3508 case IEMMODE_64BIT:
3509 case IEMMODE_32BIT:
3510 switch (pVCpu->iem.s.enmEffAddrMode)
3511 {
3512 case IEMMODE_16BIT:
3513 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3514 iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3515 case IEMMODE_32BIT:
3516 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3517 iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3518 case IEMMODE_64BIT:
3519 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3520 iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3521 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3522 }
3523 break;
3524 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3525 }
3526 }
3527}
3528
3529
3530/**
3531 * @opcode 0x70
3532 */
3533FNIEMOP_DEF(iemOp_jo_Jb)
3534{
3535 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
3536 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3537 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3538
3539 IEM_MC_BEGIN(0, 0);
3540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3541 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3542 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3543 } IEM_MC_ELSE() {
3544 IEM_MC_ADVANCE_RIP_AND_FINISH();
3545 } IEM_MC_ENDIF();
3546 IEM_MC_END();
3547}
3548
3549
3550/**
3551 * @opcode 0x71
3552 */
3553FNIEMOP_DEF(iemOp_jno_Jb)
3554{
3555 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
3556 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3557 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3558
3559 IEM_MC_BEGIN(0, 0);
3560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3561 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3562 IEM_MC_ADVANCE_RIP_AND_FINISH();
3563 } IEM_MC_ELSE() {
3564 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3565 } IEM_MC_ENDIF();
3566 IEM_MC_END();
3567}
3568
3569/**
3570 * @opcode 0x72
3571 */
3572FNIEMOP_DEF(iemOp_jc_Jb)
3573{
3574 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
3575 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3576 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3577
3578 IEM_MC_BEGIN(0, 0);
3579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3580 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3581 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3582 } IEM_MC_ELSE() {
3583 IEM_MC_ADVANCE_RIP_AND_FINISH();
3584 } IEM_MC_ENDIF();
3585 IEM_MC_END();
3586}
3587
3588
3589/**
3590 * @opcode 0x73
3591 */
3592FNIEMOP_DEF(iemOp_jnc_Jb)
3593{
3594 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
3595 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3596 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3597
3598 IEM_MC_BEGIN(0, 0);
3599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3600 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3601 IEM_MC_ADVANCE_RIP_AND_FINISH();
3602 } IEM_MC_ELSE() {
3603 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3604 } IEM_MC_ENDIF();
3605 IEM_MC_END();
3606}
3607
3608
3609/**
3610 * @opcode 0x74
3611 */
3612FNIEMOP_DEF(iemOp_je_Jb)
3613{
3614 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
3615 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3616 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3617
3618 IEM_MC_BEGIN(0, 0);
3619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3620 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3621 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3622 } IEM_MC_ELSE() {
3623 IEM_MC_ADVANCE_RIP_AND_FINISH();
3624 } IEM_MC_ENDIF();
3625 IEM_MC_END();
3626}
3627
3628
3629/**
3630 * @opcode 0x75
3631 */
3632FNIEMOP_DEF(iemOp_jne_Jb)
3633{
3634 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
3635 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3636 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3637
3638 IEM_MC_BEGIN(0, 0);
3639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3640 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3641 IEM_MC_ADVANCE_RIP_AND_FINISH();
3642 } IEM_MC_ELSE() {
3643 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3644 } IEM_MC_ENDIF();
3645 IEM_MC_END();
3646}
3647
3648
3649/**
3650 * @opcode 0x76
3651 */
3652FNIEMOP_DEF(iemOp_jbe_Jb)
3653{
3654 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
3655 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3656 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3657
3658 IEM_MC_BEGIN(0, 0);
3659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3660 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3661 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3662 } IEM_MC_ELSE() {
3663 IEM_MC_ADVANCE_RIP_AND_FINISH();
3664 } IEM_MC_ENDIF();
3665 IEM_MC_END();
3666}
3667
3668
3669/**
3670 * @opcode 0x77
3671 */
3672FNIEMOP_DEF(iemOp_jnbe_Jb)
3673{
3674 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
3675 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3676 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3677
3678 IEM_MC_BEGIN(0, 0);
3679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3680 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3681 IEM_MC_ADVANCE_RIP_AND_FINISH();
3682 } IEM_MC_ELSE() {
3683 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3684 } IEM_MC_ENDIF();
3685 IEM_MC_END();
3686}
3687
3688
3689/**
3690 * @opcode 0x78
3691 */
3692FNIEMOP_DEF(iemOp_js_Jb)
3693{
3694 IEMOP_MNEMONIC(js_Jb, "js Jb");
3695 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3696 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3697
3698 IEM_MC_BEGIN(0, 0);
3699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3700 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3701 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3702 } IEM_MC_ELSE() {
3703 IEM_MC_ADVANCE_RIP_AND_FINISH();
3704 } IEM_MC_ENDIF();
3705 IEM_MC_END();
3706}
3707
3708
3709/**
3710 * @opcode 0x79
3711 */
3712FNIEMOP_DEF(iemOp_jns_Jb)
3713{
3714 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
3715 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3716 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3717
3718 IEM_MC_BEGIN(0, 0);
3719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3720 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3721 IEM_MC_ADVANCE_RIP_AND_FINISH();
3722 } IEM_MC_ELSE() {
3723 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3724 } IEM_MC_ENDIF();
3725 IEM_MC_END();
3726}
3727
3728
3729/**
3730 * @opcode 0x7a
3731 */
3732FNIEMOP_DEF(iemOp_jp_Jb)
3733{
3734 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
3735 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3736 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3737
3738 IEM_MC_BEGIN(0, 0);
3739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3740 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3741 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3742 } IEM_MC_ELSE() {
3743 IEM_MC_ADVANCE_RIP_AND_FINISH();
3744 } IEM_MC_ENDIF();
3745 IEM_MC_END();
3746}
3747
3748
3749/**
3750 * @opcode 0x7b
3751 */
3752FNIEMOP_DEF(iemOp_jnp_Jb)
3753{
3754 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
3755 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3756 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3757
3758 IEM_MC_BEGIN(0, 0);
3759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3760 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3761 IEM_MC_ADVANCE_RIP_AND_FINISH();
3762 } IEM_MC_ELSE() {
3763 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3764 } IEM_MC_ENDIF();
3765 IEM_MC_END();
3766}
3767
3768
3769/**
3770 * @opcode 0x7c
3771 */
3772FNIEMOP_DEF(iemOp_jl_Jb)
3773{
3774 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
3775 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3776 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3777
3778 IEM_MC_BEGIN(0, 0);
3779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3780 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3781 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3782 } IEM_MC_ELSE() {
3783 IEM_MC_ADVANCE_RIP_AND_FINISH();
3784 } IEM_MC_ENDIF();
3785 IEM_MC_END();
3786}
3787
3788
3789/**
3790 * @opcode 0x7d
3791 */
3792FNIEMOP_DEF(iemOp_jnl_Jb)
3793{
3794 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
3795 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3796 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3797
3798 IEM_MC_BEGIN(0, 0);
3799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3800 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3801 IEM_MC_ADVANCE_RIP_AND_FINISH();
3802 } IEM_MC_ELSE() {
3803 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3804 } IEM_MC_ENDIF();
3805 IEM_MC_END();
3806}
3807
3808
3809/**
3810 * @opcode 0x7e
3811 */
3812FNIEMOP_DEF(iemOp_jle_Jb)
3813{
3814 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
3815 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3816 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3817
3818 IEM_MC_BEGIN(0, 0);
3819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3820 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3821 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3822 } IEM_MC_ELSE() {
3823 IEM_MC_ADVANCE_RIP_AND_FINISH();
3824 } IEM_MC_ENDIF();
3825 IEM_MC_END();
3826}
3827
3828
3829/**
3830 * @opcode 0x7f
3831 */
3832FNIEMOP_DEF(iemOp_jnle_Jb)
3833{
3834 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
3835 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3836 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3837
3838 IEM_MC_BEGIN(0, 0);
3839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3840 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3841 IEM_MC_ADVANCE_RIP_AND_FINISH();
3842 } IEM_MC_ELSE() {
3843 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3844 } IEM_MC_ENDIF();
3845 IEM_MC_END();
3846}
3847
3848
3849/**
3850 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
3851 * iemOp_Grp1_Eb_Ib_80.
3852 */
3853#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
3854 if (IEM_IS_MODRM_REG_MODE(bRm)) \
3855 { \
3856 /* register target */ \
3857 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3858 IEM_MC_BEGIN(3, 0); \
3859 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
3860 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3861 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3862 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
3863 \
3864 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3865 IEM_MC_REF_EFLAGS(pEFlags); \
3866 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3867 \
3868 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3869 IEM_MC_END(); \
3870 } \
3871 else \
3872 { \
3873 /* memory target */ \
3874 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
3875 { \
3876 IEM_MC_BEGIN(3, 3); \
3877 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3878 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3879 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3880 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
3881 \
3882 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
3883 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3884 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3885 IEMOP_HLP_DONE_DECODING(); \
3886 \
3887 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
3888 IEM_MC_FETCH_EFLAGS(EFlags); \
3889 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3890 \
3891 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
3892 IEM_MC_COMMIT_EFLAGS(EFlags); \
3893 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3894 IEM_MC_END(); \
3895 } \
3896 else \
3897 { \
3898 (void)0
3899
/**
 * Completes IEMOP_BODY_BINARY_Eb_Ib_RW for the LOCK-prefixed memory target,
 * closing the braces that macro left open.
 *
 * @param   a_fnLockedU8    The locked (atomic) 8-bit worker to invoke.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
3923
/**
 * Body for group 1 instruction (binary) w/ byte imm operand where the
 * destination is only read (CMP): the memory operand is mapped read-only
 * and only EFLAGS are updated.  Like the RW variant, this ends inside an
 * open 'else' branch for the LOCK-prefixed case; follow up with
 * IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK, which raises \#UD and closes the braces.
 *
 * @param   a_fnNormalU8    The 8-bit worker to invoke (flags-only output).
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3); \
            IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
3970
/**
 * Completes IEMOP_BODY_BINARY_Eb_Ib_RO for the LOCK-prefixed case by raising
 * an invalid-lock-prefix exception (\#UD), closing the braces that macro
 * left open.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
3977
3978
3979
3980/**
3981 * @opmaps grp1_80,grp1_83
3982 * @opcode /0
3983 */
3984FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
3985{
3986 IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
3987 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_add_u8);
3988 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
3989}
3990
3991
3992/**
3993 * @opmaps grp1_80,grp1_83
3994 * @opcode /1
3995 */
3996FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
3997{
3998 IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
3999 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_or_u8);
4000 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
4001}
4002
4003
4004/**
4005 * @opmaps grp1_80,grp1_83
4006 * @opcode /2
4007 */
4008FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
4009{
4010 IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
4011 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_adc_u8);
4012 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
4013}
4014
4015
4016/**
4017 * @opmaps grp1_80,grp1_83
4018 * @opcode /3
4019 */
4020FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
4021{
4022 IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
4023 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sbb_u8);
4024 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
4025}
4026
4027
4028/**
4029 * @opmaps grp1_80,grp1_83
4030 * @opcode /4
4031 */
4032FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
4033{
4034 IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
4035 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_and_u8);
4036 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
4037}
4038
4039
4040/**
4041 * @opmaps grp1_80,grp1_83
4042 * @opcode /5
4043 */
4044FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
4045{
4046 IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
4047 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sub_u8);
4048 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
4049}
4050
4051
4052/**
4053 * @opmaps grp1_80,grp1_83
4054 * @opcode /6
4055 */
4056FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
4057{
4058 IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
4059 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_xor_u8);
4060 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
4061}
4062
4063
4064/**
4065 * @opmaps grp1_80,grp1_83
4066 * @opcode /7
4067 */
4068FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
4069{
4070 IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
4071 IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
4072 IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
4073}
4074
4075
4076/**
4077 * @opcode 0x80
4078 */
4079FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
4080{
4081 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4082 switch (IEM_GET_MODRM_REG_8(bRm))
4083 {
4084 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
4085 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib, bRm);
4086 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
4087 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
4088 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
4089 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
4090 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
4091 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
4092 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4093 }
4094}
4095
4096
4097/**
4098 * Body for a group 1 binary operator.
4099 */
4100#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4101 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4102 { \
4103 /* register target */ \
4104 switch (pVCpu->iem.s.enmEffOpSize) \
4105 { \
4106 case IEMMODE_16BIT: \
4107 { \
4108 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4109 IEM_MC_BEGIN(3, 0); \
4110 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4111 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4112 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4113 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4114 \
4115 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4116 IEM_MC_REF_EFLAGS(pEFlags); \
4117 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4118 \
4119 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4120 IEM_MC_END(); \
4121 break; \
4122 } \
4123 \
4124 case IEMMODE_32BIT: \
4125 { \
4126 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4127 IEM_MC_BEGIN(3, 0); \
4128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4129 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4130 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4131 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4132 \
4133 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4134 IEM_MC_REF_EFLAGS(pEFlags); \
4135 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4136 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
4137 \
4138 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4139 IEM_MC_END(); \
4140 break; \
4141 } \
4142 \
4143 case IEMMODE_64BIT: \
4144 { \
4145 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4146 IEM_MC_BEGIN(3, 0); \
4147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4148 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4149 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4150 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4151 \
4152 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4153 IEM_MC_REF_EFLAGS(pEFlags); \
4154 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4155 \
4156 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4157 IEM_MC_END(); \
4158 break; \
4159 } \
4160 \
4161 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4162 } \
4163 } \
4164 else \
4165 { \
4166 /* memory target */ \
4167 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4168 { \
4169 switch (pVCpu->iem.s.enmEffOpSize) \
4170 { \
4171 case IEMMODE_16BIT: \
4172 { \
4173 IEM_MC_BEGIN(3, 3); \
4174 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4175 IEM_MC_ARG(uint16_t, u16Src, 1); \
4176 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4177 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4178 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4179 \
4180 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4181 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4182 IEM_MC_ASSIGN(u16Src, u16Imm); \
4183 IEMOP_HLP_DONE_DECODING(); \
4184 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4185 IEM_MC_FETCH_EFLAGS(EFlags); \
4186 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4187 \
4188 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
4189 IEM_MC_COMMIT_EFLAGS(EFlags); \
4190 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4191 IEM_MC_END(); \
4192 break; \
4193 } \
4194 \
4195 case IEMMODE_32BIT: \
4196 { \
4197 IEM_MC_BEGIN(3, 3); \
4198 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4199 IEM_MC_ARG(uint32_t, u32Src, 1); \
4200 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4202 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4203 \
4204 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4205 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4206 IEM_MC_ASSIGN(u32Src, u32Imm); \
4207 IEMOP_HLP_DONE_DECODING(); \
4208 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4209 IEM_MC_FETCH_EFLAGS(EFlags); \
4210 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4211 \
4212 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
4213 IEM_MC_COMMIT_EFLAGS(EFlags); \
4214 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4215 IEM_MC_END(); \
4216 break; \
4217 } \
4218 \
4219 case IEMMODE_64BIT: \
4220 { \
4221 IEM_MC_BEGIN(3, 3); \
4222 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4223 IEM_MC_ARG(uint64_t, u64Src, 1); \
4224 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4225 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4226 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4227 \
4228 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4229 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4230 IEMOP_HLP_DONE_DECODING(); \
4231 IEM_MC_ASSIGN(u64Src, u64Imm); \
4232 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4233 IEM_MC_FETCH_EFLAGS(EFlags); \
4234 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4235 \
4236 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
4237 IEM_MC_COMMIT_EFLAGS(EFlags); \
4238 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4239 IEM_MC_END(); \
4240 break; \
4241 } \
4242 \
4243 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4244 } \
4245 } \
4246 else \
4247 { \
4248 (void)0
/* This must be a separate macro due to parsing restrictions in IEMAllInstPython.py.
   It textually completes the dangling 'else {' that IEMOP_BODY_BINARY_Ev_Iz_RW
   ends with, supplying the LOCK-prefixed memory-target path using the locked
   (interlocked) worker functions. */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t *,  pu16Dst,           0); \
                    IEM_MC_ARG(uint16_t,    u16Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(                   pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t *,  pu32Dst,           0); \
                    IEM_MC_ARG(uint32_t,    u32Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(                   pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t *,  pu64Dst,           0); \
                    IEM_MC_ARG(uint64_t,    u64Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(                   pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4330
/* Read-only version of IEMOP_BODY_BINARY_Ev_Iz_RW, used by CMP (0x81 /7):
   the destination is only read (flags are the real output), so memory is
   mapped RO and a LOCK prefix raises #UD instead of taking a locked path.
   This macro is self-contained - no LOCKED companion follows it. */
#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst,       0); \
                    IEM_MC_ARG(uint16_t,    u16Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(                    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst,       0); \
                    IEM_MC_ARG(uint32_t,    u32Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(                    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst,       0); \
                    IEM_MC_ARG(uint64_t,    u64Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(                    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4484
4485
4486/**
4487 * @opmaps grp1_81
4488 * @opcode /0
4489 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
{
    /* ADD Ev,Iz - the RW body emits register and plain memory paths, the
       LOCKED body completes it with the LOCK-prefixed memory path. */
    IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
4496
4497
4498/**
4499 * @opmaps grp1_81
4500 * @opcode /1
4501 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
{
    /* OR Ev,Iz - normal paths plus LOCK-prefixed memory path. */
    IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
4508
4509
4510/**
4511 * @opmaps grp1_81
4512 * @opcode /2
4513 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
{
    /* ADC Ev,Iz - add with carry-in; normal paths plus LOCK-prefixed memory path. */
    IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
4520
4521
4522/**
4523 * @opmaps grp1_81
4524 * @opcode /3
4525 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
{
    /* SBB Ev,Iz - subtract with borrow-in; normal paths plus LOCK-prefixed memory path. */
    IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
4532
4533
4534/**
4535 * @opmaps grp1_81
4536 * @opcode /4
4537 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
{
    /* AND Ev,Iz - normal paths plus LOCK-prefixed memory path. */
    IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
4544
4545
4546/**
4547 * @opmaps grp1_81
4548 * @opcode /5
4549 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
{
    /* SUB Ev,Iz - normal paths plus LOCK-prefixed memory path. */
    IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
4556
4557
4558/**
4559 * @opmaps grp1_81
4560 * @opcode /6
4561 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
{
    /* XOR Ev,Iz - normal paths plus LOCK-prefixed memory path. */
    IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
4568
4569
4570/**
4571 * @opmaps grp1_81
4572 * @opcode /7
4573 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
{
    /* CMP Ev,Iz - read-only body; it rejects LOCK internally, so no
       LOCKED companion macro is needed here. */
    IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
4579
4580
4581/**
4582 * @opcode 0x81
4583 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    /* Group 1 dispatcher for opcode 0x81: the reg field of the ModR/M byte
       selects which z-immediate operation to decode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4600
4601
4602/**
4603 * @opcode 0x82
4604 * @opmnemonic grp1_82
4605 * @opgroup og_groups
4606 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    /* Opcode 0x82 is an alias of 0x80 that is only valid outside 64-bit
       mode (it raises #UD in long mode), so reject 64-bit and forward. */
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
4612
4613
4614/**
4615 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4616 * iemOp_Grp1_Ev_Ib.
4617 */
4618#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4619 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4620 { \
4621 /* \
4622 * Register target \
4623 */ \
4624 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4625 switch (pVCpu->iem.s.enmEffOpSize) \
4626 { \
4627 case IEMMODE_16BIT: \
4628 { \
4629 IEM_MC_BEGIN(3, 0); \
4630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4631 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4632 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
4633 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4634 \
4635 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4636 IEM_MC_REF_EFLAGS(pEFlags); \
4637 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4638 \
4639 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4640 IEM_MC_END(); \
4641 break; \
4642 } \
4643 \
4644 case IEMMODE_32BIT: \
4645 { \
4646 IEM_MC_BEGIN(3, 0); \
4647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4648 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4649 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
4650 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4651 \
4652 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4653 IEM_MC_REF_EFLAGS(pEFlags); \
4654 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4655 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
4656 \
4657 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4658 IEM_MC_END(); \
4659 break; \
4660 } \
4661 \
4662 case IEMMODE_64BIT: \
4663 { \
4664 IEM_MC_BEGIN(3, 0); \
4665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4666 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4667 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
4668 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4669 \
4670 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4671 IEM_MC_REF_EFLAGS(pEFlags); \
4672 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4673 \
4674 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4675 IEM_MC_END(); \
4676 break; \
4677 } \
4678 \
4679 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4680 } \
4681 } \
4682 else \
4683 { \
4684 /* \
4685 * Memory target. \
4686 */ \
4687 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4688 { \
4689 switch (pVCpu->iem.s.enmEffOpSize) \
4690 { \
4691 case IEMMODE_16BIT: \
4692 { \
4693 IEM_MC_BEGIN(3, 3); \
4694 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4695 IEM_MC_ARG(uint16_t, u16Src, 1); \
4696 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4697 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4698 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4699 \
4700 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4701 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4702 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
4703 IEMOP_HLP_DONE_DECODING(); \
4704 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4705 IEM_MC_FETCH_EFLAGS(EFlags); \
4706 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4707 \
4708 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
4709 IEM_MC_COMMIT_EFLAGS(EFlags); \
4710 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4711 IEM_MC_END(); \
4712 break; \
4713 } \
4714 \
4715 case IEMMODE_32BIT: \
4716 { \
4717 IEM_MC_BEGIN(3, 3); \
4718 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4719 IEM_MC_ARG(uint32_t, u32Src, 1); \
4720 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4721 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4722 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4723 \
4724 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4725 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4726 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
4727 IEMOP_HLP_DONE_DECODING(); \
4728 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4729 IEM_MC_FETCH_EFLAGS(EFlags); \
4730 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4731 \
4732 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
4733 IEM_MC_COMMIT_EFLAGS(EFlags); \
4734 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4735 IEM_MC_END(); \
4736 break; \
4737 } \
4738 \
4739 case IEMMODE_64BIT: \
4740 { \
4741 IEM_MC_BEGIN(3, 3); \
4742 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4743 IEM_MC_ARG(uint64_t, u64Src, 1); \
4744 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4745 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4746 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4747 \
4748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4749 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4750 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
4751 IEMOP_HLP_DONE_DECODING(); \
4752 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4753 IEM_MC_FETCH_EFLAGS(EFlags); \
4754 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4755 \
4756 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
4757 IEM_MC_COMMIT_EFLAGS(EFlags); \
4758 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4759 IEM_MC_END(); \
4760 break; \
4761 } \
4762 \
4763 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4764 } \
4765 } \
4766 else \
4767 { \
4768 (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py.
   Completes the dangling 'else {' left open by IEMOP_BODY_BINARY_Ev_Ib_RW,
   providing the LOCK-prefixed memory path with the interlocked workers;
   the byte immediate is again sign-extended via (int8_t). */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t *,  pu16Dst,           0); \
                    IEM_MC_ARG(uint16_t,    u16Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(                   pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t *,  pu32Dst,           0); \
                    IEM_MC_ARG(uint32_t,    u32Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(                   pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t *,  pu64Dst,           0); \
                    IEM_MC_ARG(uint64_t,    u64Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(                   pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4850
/* Read-only variant of IEMOP_BODY_BINARY_Ev_Ib_RW, used by CMP (0x83 /7):
   the destination is only read, so memory is mapped RO and a LOCK prefix
   raises #UD.  Self-contained - no LOCKED companion follows. */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst,       0); \
                    IEM_MC_ARG(uint16_t,    u16Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(                    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst,       0); \
                    IEM_MC_ARG(uint32_t,    u32Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(                    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst,       0); \
                    IEM_MC_ARG(uint64_t,    u64Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(                    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,   bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
5006
5007/**
5008 * @opmaps grp1_83
5009 * @opcode /0
5010 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
{
    /* ADD Ev,Ib - sign-extended byte immediate; RW body plus LOCK path. */
    IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
5017
5018
5019/**
5020 * @opmaps grp1_83
5021 * @opcode /1
5022 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
{
    /* OR Ev,Ib - sign-extended byte immediate; RW body plus LOCK path. */
    IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
5029
5030
5031/**
5032 * @opmaps grp1_83
5033 * @opcode /2
5034 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
{
    /* ADC Ev,Ib - sign-extended byte immediate; RW body plus LOCK path. */
    IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
5041
5042
5043/**
5044 * @opmaps grp1_83
5045 * @opcode /3
5046 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
{
    /* SBB Ev,Ib - sign-extended byte immediate; RW body plus LOCK path. */
    IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
5053
5054
5055/**
5056 * @opmaps grp1_83
5057 * @opcode /4
5058 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
{
    /* AND Ev,Ib - sign-extended byte immediate; RW body plus LOCK path. */
    IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
5065
5066
5067/**
5068 * @opmaps grp1_83
5069 * @opcode /5
5070 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
    /* RW body covers register/memory forms; LOCKED body covers LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
5077
5078
5079/**
5080 * @opmaps grp1_83
5081 * @opcode /6
5082 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
    /* RW body covers register/memory forms; LOCKED body covers LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
5089
5090
5091/**
5092 * @opmaps grp1_83
5093 * @opcode /7
5094 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
    /* Read-only body: CMP does not write the destination, so there is no locked variant. */
    IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
5100
5101
5102/**
5103 * @opcode 0x83
5104 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
             to the 386 even if absent in the intel reference manuals and some
             3rd party opcode listings. */
    /* Group 1 dispatch: modrm.reg selects the arithmetic/logical operation. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* reg field is 3 bits, 0..7 is exhaustive */
    }
}
5124
5125
5126/**
5127 * @opcode 0x84
5128 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    /* AF is left undefined by TEST, so the verification mode must not compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Read-only body (TEST only updates EFLAGS); a LOCK prefix is invalid here. */
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
5136
5137
5138/**
5139 * @opcode 0x85
5140 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    /* AF is left undefined by TEST, so the verification mode must not compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Read-only body covering the 16/32/64-bit operand sizes. */
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64);
}
5147
5148
5149/**
5150 * @opcode 0x86
5151 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register-register: fetch both, store swapped.  Both fetches precede
           both stores so reg==rm degenerates to a no-op rather than garbage. */
        IEM_MC_BEGIN(0, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(2, 4);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);
        IEM_MC_LOCAL(uint8_t, uTmpReg);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP_U8_RW(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* The locked worker is used even without a LOCK prefix (memory XCHG locks
           implicitly); only the disregard-lock execution mode picks the unlocked one. */
        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
        else
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Mem, bUnmapInfo);
        /* The worker swapped the old memory value into uTmpReg; write it to the register. */
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5202
5203
5204/**
5205 * @opcode 0x87
5206 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register-register swap, one MC block per effective operand size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Memory form: map the destination RW, then call the locked worker unless
           the execution mode says to disregard locking (memory XCHG locks implicitly). */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 4);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_LOCAL(uint16_t, uTmpReg);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U16_RW(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 4);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_LOCAL(uint32_t, uTmpReg);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U32_RW(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                /* NOTE(review): pu32Reg references the local uTmpReg, not the register
                   file, and the preceding STORE_GREG_U32 normally zeroes the high half
                   already -- this CLEAR_HIGH looks redundant/suspicious; confirm against
                   the MC statement definitions before touching it. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 4);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_LOCAL(uint64_t, uTmpReg);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U64_RW(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5348
5349
5350/**
5351 * @opcode 0x88
5352 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* reg -> reg copy of the byte register. */
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5389
5390
5391/**
5392 * @opcode 0x89
5393 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* reg -> reg copy, one MC block per effective operand size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5487
5488
5489/**
5490 * @opcode 0x8a
5491 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* reg <- reg copy of the byte register. */
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5527
5528
5529/**
5530 * @opcode 0x8b
5531 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* reg <- reg copy, one MC block per effective operand size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5625
5626
5627/**
5628 * opcode 0x63
5629 * @todo Table fixme
5630 */
5631FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
5632{
5633 if (!IEM_IS_64BIT_CODE(pVCpu))
5634 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
5635 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
5636 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
5637 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
5638}
5639
5640
5641/**
5642 * @opcode 0x8c
5643 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    /* Note: IEM_GET_MODRM_REG_8 deliberately excludes REX.R here. */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if (iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                /* Zero-extending fetch clears bits 31:16 of the destination. */
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're saving the register to memory. The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5719
5720
5721
5722
5723/**
5724 * @opcode 0x8d
5725 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */

    /* LEA just stores the calculated effective address, truncated to the
       effective operand size; no memory access is performed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            /* No truncation needed; the address is stored as-is. */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5772
5773
5774/**
5775 * @opcode 0x8e
5776 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    /** @todo r=bird: What does 8086 do here wrt CS? */
    /* Loading CS this way is invalid; so are encodings beyond GS. */
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* Loading SS (or DS/ES/CS in 32-bit code) can change the execution mode,
           hence the IEM_CIMPL_F_MODE flag on the second path. */
        if (iSRegArg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
            IEM_MC_CALL_CIMPL_2(                    0, iemCImpl_load_SReg, iSRegArg, u16Value);
        else
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE, iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        if (iSRegArg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
            IEM_MC_CALL_CIMPL_2(                    0, iemCImpl_load_SReg, iSRegArg, u16Value);
        else
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE, iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
}
5836
5837
5838/** Opcode 0x8f /0. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        /* The third IEM_MC_CALC_RM_EFF_ADDR argument carries the pop size
           (2/4/8 bytes) in bits 15:8 -- presumably so the EA calculation can
           account for the pre-incremented rSP; confirm in iemOpHlpCalcRmEffAddr. */
        case IEMMODE_16BIT:
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            /* NOTE(review): no 'break' after IEM_MC_END() in these cases -- the
               CIMPL call macro is expected to return; confirm against IEMMc.h. */
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
        }

        case IEMMODE_32BIT:
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
        }

        case IEMMODE_64BIT:
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    /* Pop into a temporary RSP copy first; only commit it on full success so a
       faulting store leaves the guest state untouched. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
5975
5976
5977/**
5978 * @opcode 0x8f
5979 */
FNIEMOP_DEF(iemOp_Grp1A__xop)
{
    /*
     * AMD has defined /1 thru /7 as XOP prefix.  The prefix is similar to the
     * three byte VEX prefix, except that the mmmmm field cannot have the values
     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    IEMOP_MNEMONIC(xop, "xop");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
    {
        /** @todo Test when exctly the XOP conformance checks kick in during
         * instruction decoding and fetching (using \#PF). */
        /* bRm doubles as XOP byte 1: inverted R/X/B in bits 7:5, mmmmm in bits 4:0. */
        uint8_t bXop2;   IEM_OPCODE_GET_NEXT_U8(&bXop2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* XOP may not be combined with 66/F2/F3, LOCK or REX prefixes. */
        if (   (   pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
            /* XOP.W selects 64-bit operand size, but only in long mode. */
            if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* The prefix bits are stored inverted, hence the ~ before shifting. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bXop2 & 0x3;

            /** @todo XOP: Just use new tables and decoders. */
            switch (bRm & 0x1f)
            {
                case 8: /* xop opcode map 8. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 9: /* xop opcode map 9. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 10: /* xop opcode map 10. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        else
            Log(("XOP: Invalid prefix mix!\n"));
    }
    else
        Log(("XOP: XOP support disabled!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
6039
6040
6041/**
6042 * Common 'xchg reg,rAX' helper.
6043 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    /* Fold in REX.B so opcodes 0x90..0x97 can reach r8..r15. */
    iReg |= pVCpu->iem.s.uRexB;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6091
6092
6093/**
6094 * @opcode 0x90
6095 */
6096FNIEMOP_DEF(iemOp_nop)
6097{
6098 /* R8/R8D and RAX/EAX can be exchanged. */
6099 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
6100 {
6101 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
6102 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
6103 }
6104
6105 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
6106 {
6107 IEMOP_MNEMONIC(pause, "pause");
6108 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
6109 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
6110 if (!IEM_IS_IN_GUEST(pVCpu))
6111 { /* probable */ }
6112#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6113 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
6114 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmx_pause);
6115#endif
6116#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
6117 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
6118 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_svm_pause);
6119#endif
6120 }
6121 else
6122 IEMOP_MNEMONIC(nop, "nop");
6123 /** @todo testcase: lock nop; lock pause */
6124 IEM_MC_BEGIN(0, 0);
6125 IEMOP_HLP_DONE_DECODING();
6126 IEM_MC_ADVANCE_RIP_AND_FINISH();
6127 IEM_MC_END();
6128}
6129
6130
6131/**
6132 * @opcode 0x91
6133 */
6134FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
6135{
6136 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
6137 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
6138}
6139
6140
6141/**
6142 * @opcode 0x92
6143 */
6144FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
6145{
6146 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
6147 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
6148}
6149
6150
6151/**
6152 * @opcode 0x93
6153 */
6154FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
6155{
6156 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
6157 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
6158}
6159
6160
6161/**
6162 * @opcode 0x94
6163 */
6164FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6165{
6166 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6167 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6168}
6169
6170
6171/**
6172 * @opcode 0x95
6173 */
6174FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
6175{
6176 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
6177 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
6178}
6179
6180
6181/**
6182 * @opcode 0x96
6183 */
6184FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
6185{
6186 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
6187 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
6188}
6189
6190
6191/**
6192 * @opcode 0x97
6193 */
6194FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
6195{
6196 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
6197 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
6198}
6199
6200
6201/**
6202 * @opcode 0x98
6203 */
6204FNIEMOP_DEF(iemOp_cbw)
6205{
6206 switch (pVCpu->iem.s.enmEffOpSize)
6207 {
6208 case IEMMODE_16BIT:
6209 IEMOP_MNEMONIC(cbw, "cbw");
6210 IEM_MC_BEGIN(0, 1);
6211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6212 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
6213 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
6214 } IEM_MC_ELSE() {
6215 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
6216 } IEM_MC_ENDIF();
6217 IEM_MC_ADVANCE_RIP_AND_FINISH();
6218 IEM_MC_END();
6219 break;
6220
6221 case IEMMODE_32BIT:
6222 IEMOP_MNEMONIC(cwde, "cwde");
6223 IEM_MC_BEGIN(0, 1);
6224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6225 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6226 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
6227 } IEM_MC_ELSE() {
6228 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
6229 } IEM_MC_ENDIF();
6230 IEM_MC_ADVANCE_RIP_AND_FINISH();
6231 IEM_MC_END();
6232 break;
6233
6234 case IEMMODE_64BIT:
6235 IEMOP_MNEMONIC(cdqe, "cdqe");
6236 IEM_MC_BEGIN(0, 1);
6237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6238 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6239 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
6240 } IEM_MC_ELSE() {
6241 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
6242 } IEM_MC_ENDIF();
6243 IEM_MC_ADVANCE_RIP_AND_FINISH();
6244 IEM_MC_END();
6245 break;
6246
6247 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6248 }
6249}
6250
6251
6252/**
6253 * @opcode 0x99
6254 */
6255FNIEMOP_DEF(iemOp_cwd)
6256{
6257 switch (pVCpu->iem.s.enmEffOpSize)
6258 {
6259 case IEMMODE_16BIT:
6260 IEMOP_MNEMONIC(cwd, "cwd");
6261 IEM_MC_BEGIN(0, 1);
6262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6263 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6264 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
6265 } IEM_MC_ELSE() {
6266 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
6267 } IEM_MC_ENDIF();
6268 IEM_MC_ADVANCE_RIP_AND_FINISH();
6269 IEM_MC_END();
6270 break;
6271
6272 case IEMMODE_32BIT:
6273 IEMOP_MNEMONIC(cdq, "cdq");
6274 IEM_MC_BEGIN(0, 1);
6275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6276 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6277 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
6278 } IEM_MC_ELSE() {
6279 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
6280 } IEM_MC_ENDIF();
6281 IEM_MC_ADVANCE_RIP_AND_FINISH();
6282 IEM_MC_END();
6283 break;
6284
6285 case IEMMODE_64BIT:
6286 IEMOP_MNEMONIC(cqo, "cqo");
6287 IEM_MC_BEGIN(0, 1);
6288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6289 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
6290 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
6291 } IEM_MC_ELSE() {
6292 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
6293 } IEM_MC_ENDIF();
6294 IEM_MC_ADVANCE_RIP_AND_FINISH();
6295 IEM_MC_END();
6296 break;
6297
6298 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6299 }
6300}
6301
6302
6303/**
6304 * @opcode 0x9a
6305 */
6306FNIEMOP_DEF(iemOp_call_Ap)
6307{
6308 IEMOP_MNEMONIC(call_Ap, "call Ap");
6309 IEMOP_HLP_NO_64BIT();
6310
6311 /* Decode the far pointer address and pass it on to the far call C implementation. */
6312 uint32_t off32Seg;
6313 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
6314 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
6315 else
6316 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
6317 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
6318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6319 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
6320 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
6321 iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
6322}
6323
6324
/** Opcode 0x9b. (aka fwait)
 * Checks for pending unmasked x87 exceptions (and CR0.TS/MP) but performs
 * no other operation. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6336
6337
6338/**
6339 * @opcode 0x9c
6340 */
6341FNIEMOP_DEF(iemOp_pushf_Fv)
6342{
6343 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
6344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6345 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6346 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
6347}
6348
6349
6350/**
6351 * @opcode 0x9d
6352 */
6353FNIEMOP_DEF(iemOp_popf_Fv)
6354{
6355 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
6356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6357 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6358 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
6359 iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
6360}
6361
6362
6363/**
6364 * @opcode 0x9e
6365 */
6366FNIEMOP_DEF(iemOp_sahf)
6367{
6368 IEMOP_MNEMONIC(sahf, "sahf");
6369 if ( IEM_IS_64BIT_CODE(pVCpu)
6370 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6371 IEMOP_RAISE_INVALID_OPCODE_RET();
6372 IEM_MC_BEGIN(0, 2);
6373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6374 IEM_MC_LOCAL(uint32_t, u32Flags);
6375 IEM_MC_LOCAL(uint32_t, EFlags);
6376 IEM_MC_FETCH_EFLAGS(EFlags);
6377 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
6378 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6379 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
6380 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
6381 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
6382 IEM_MC_COMMIT_EFLAGS(EFlags);
6383 IEM_MC_ADVANCE_RIP_AND_FINISH();
6384 IEM_MC_END();
6385}
6386
6387
6388/**
6389 * @opcode 0x9f
6390 */
6391FNIEMOP_DEF(iemOp_lahf)
6392{
6393 IEMOP_MNEMONIC(lahf, "lahf");
6394 if ( IEM_IS_64BIT_CODE(pVCpu)
6395 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6396 IEMOP_RAISE_INVALID_OPCODE_RET();
6397 IEM_MC_BEGIN(0, 1);
6398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6399 IEM_MC_LOCAL(uint8_t, u8Flags);
6400 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
6401 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
6402 IEM_MC_ADVANCE_RIP_AND_FINISH();
6403 IEM_MC_END();
6404}
6405
6406
6407/**
6408 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
6409 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
6410 * Will return/throw on failures.
6411 * @param a_GCPtrMemOff The variable to store the offset in.
6412 */
6413#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
6414 do \
6415 { \
6416 switch (pVCpu->iem.s.enmEffAddrMode) \
6417 { \
6418 case IEMMODE_16BIT: \
6419 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
6420 break; \
6421 case IEMMODE_32BIT: \
6422 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
6423 break; \
6424 case IEMMODE_64BIT: \
6425 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
6426 break; \
6427 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
6428 } \
6429 } while (0)
6430
6431/**
6432 * @opcode 0xa0
6433 */
6434FNIEMOP_DEF(iemOp_mov_AL_Ob)
6435{
6436 /*
6437 * Get the offset.
6438 */
6439 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
6440 RTGCPTR GCPtrMemOff;
6441 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6442
6443 /*
6444 * Fetch AL.
6445 */
6446 IEM_MC_BEGIN(0,1);
6447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6448 IEM_MC_LOCAL(uint8_t, u8Tmp);
6449 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6450 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
6451 IEM_MC_ADVANCE_RIP_AND_FINISH();
6452 IEM_MC_END();
6453}
6454
6455
6456/**
6457 * @opcode 0xa1
6458 */
6459FNIEMOP_DEF(iemOp_mov_rAX_Ov)
6460{
6461 /*
6462 * Get the offset.
6463 */
6464 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
6465 RTGCPTR GCPtrMemOff;
6466 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6467
6468 /*
6469 * Fetch rAX.
6470 */
6471 switch (pVCpu->iem.s.enmEffOpSize)
6472 {
6473 case IEMMODE_16BIT:
6474 IEM_MC_BEGIN(0,1);
6475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6476 IEM_MC_LOCAL(uint16_t, u16Tmp);
6477 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6478 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
6479 IEM_MC_ADVANCE_RIP_AND_FINISH();
6480 IEM_MC_END();
6481 break;
6482
6483 case IEMMODE_32BIT:
6484 IEM_MC_BEGIN(0,1);
6485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6486 IEM_MC_LOCAL(uint32_t, u32Tmp);
6487 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6488 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
6489 IEM_MC_ADVANCE_RIP_AND_FINISH();
6490 IEM_MC_END();
6491 break;
6492
6493 case IEMMODE_64BIT:
6494 IEM_MC_BEGIN(0,1);
6495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6496 IEM_MC_LOCAL(uint64_t, u64Tmp);
6497 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6498 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
6499 IEM_MC_ADVANCE_RIP_AND_FINISH();
6500 IEM_MC_END();
6501 break;
6502
6503 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6504 }
6505}
6506
6507
6508/**
6509 * @opcode 0xa2
6510 */
6511FNIEMOP_DEF(iemOp_mov_Ob_AL)
6512{
6513 /*
6514 * Get the offset.
6515 */
6516 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
6517 RTGCPTR GCPtrMemOff;
6518 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6519
6520 /*
6521 * Store AL.
6522 */
6523 IEM_MC_BEGIN(0,1);
6524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6525 IEM_MC_LOCAL(uint8_t, u8Tmp);
6526 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
6527 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
6528 IEM_MC_ADVANCE_RIP_AND_FINISH();
6529 IEM_MC_END();
6530}
6531
6532
6533/**
6534 * @opcode 0xa3
6535 */
6536FNIEMOP_DEF(iemOp_mov_Ov_rAX)
6537{
6538 /*
6539 * Get the offset.
6540 */
6541 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
6542 RTGCPTR GCPtrMemOff;
6543 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6544
6545 /*
6546 * Store rAX.
6547 */
6548 switch (pVCpu->iem.s.enmEffOpSize)
6549 {
6550 case IEMMODE_16BIT:
6551 IEM_MC_BEGIN(0,1);
6552 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6553 IEM_MC_LOCAL(uint16_t, u16Tmp);
6554 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
6555 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
6556 IEM_MC_ADVANCE_RIP_AND_FINISH();
6557 IEM_MC_END();
6558 break;
6559
6560 case IEMMODE_32BIT:
6561 IEM_MC_BEGIN(0,1);
6562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6563 IEM_MC_LOCAL(uint32_t, u32Tmp);
6564 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
6565 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
6566 IEM_MC_ADVANCE_RIP_AND_FINISH();
6567 IEM_MC_END();
6568 break;
6569
6570 case IEMMODE_64BIT:
6571 IEM_MC_BEGIN(0,1);
6572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6573 IEM_MC_LOCAL(uint64_t, u64Tmp);
6574 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
6575 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
6576 IEM_MC_ADVANCE_RIP_AND_FINISH();
6577 IEM_MC_END();
6578 break;
6579
6580 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6581 }
6582}
6583
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one non-repeated MOVS iteration: loads from DS(/override):[rSI],
 * stores to ES:[rDI], then advances (or, with EFLAGS.DF set, retreats) both
 * index registers by the element size.
 *
 * @param ValBits  Element width in bits (8/16/32/64).
 * @param AddrBits Effective address width in bits (16/32/64).
 */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR,  uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6603
6604/**
6605 * @opcode 0xa4
6606 */
6607FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
6608{
6609 /*
6610 * Use the C implementation if a repeat prefix is encountered.
6611 */
6612 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6613 {
6614 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
6615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6616 switch (pVCpu->iem.s.enmEffAddrMode)
6617 {
6618 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
6619 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
6620 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
6621 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6622 }
6623 }
6624
6625 /*
6626 * Sharing case implementation with movs[wdq] below.
6627 */
6628 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
6629 switch (pVCpu->iem.s.enmEffAddrMode)
6630 {
6631 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
6632 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
6633 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
6634 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6635 }
6636}
6637
6638
6639/**
6640 * @opcode 0xa5
6641 */
6642FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
6643{
6644
6645 /*
6646 * Use the C implementation if a repeat prefix is encountered.
6647 */
6648 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6649 {
6650 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
6651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6652 switch (pVCpu->iem.s.enmEffOpSize)
6653 {
6654 case IEMMODE_16BIT:
6655 switch (pVCpu->iem.s.enmEffAddrMode)
6656 {
6657 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
6658 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
6659 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
6660 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6661 }
6662 break;
6663 case IEMMODE_32BIT:
6664 switch (pVCpu->iem.s.enmEffAddrMode)
6665 {
6666 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
6667 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
6668 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
6669 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6670 }
6671 case IEMMODE_64BIT:
6672 switch (pVCpu->iem.s.enmEffAddrMode)
6673 {
6674 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
6675 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
6676 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
6677 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6678 }
6679 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6680 }
6681 }
6682
6683 /*
6684 * Annoying double switch here.
6685 * Using ugly macro for implementing the cases, sharing it with movsb.
6686 */
6687 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
6688 switch (pVCpu->iem.s.enmEffOpSize)
6689 {
6690 case IEMMODE_16BIT:
6691 switch (pVCpu->iem.s.enmEffAddrMode)
6692 {
6693 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
6694 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
6695 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
6696 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6697 }
6698 break;
6699
6700 case IEMMODE_32BIT:
6701 switch (pVCpu->iem.s.enmEffAddrMode)
6702 {
6703 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
6704 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
6705 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
6706 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6707 }
6708 break;
6709
6710 case IEMMODE_64BIT:
6711 switch (pVCpu->iem.s.enmEffAddrMode)
6712 {
6713 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6714 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
6715 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
6716 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6717 }
6718 break;
6719 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6720 }
6721}
6722
6723#undef IEM_MOVS_CASE
6724
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one non-repeated CMPS iteration: compares the element at
 * DS(/override):[rSI] with the one at ES:[rDI] via iemAImpl_cmp_uXX (which
 * sets the arithmetic flags), then advances/retreats both index registers
 * according to EFLAGS.DF.
 *
 * @param ValBits  Element width in bits (8/16/32/64).
 * @param AddrBits Effective address width in bits (16/32/64).
 */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 3); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
    IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
    IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_LOCAL(RTGCPTR,  uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
    IEM_MC_REF_LOCAL(puValue1, uValue1); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6752
6753/**
6754 * @opcode 0xa6
6755 */
6756FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
6757{
6758
6759 /*
6760 * Use the C implementation if a repeat prefix is encountered.
6761 */
6762 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6763 {
6764 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
6765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6766 switch (pVCpu->iem.s.enmEffAddrMode)
6767 {
6768 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6769 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6770 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6771 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6772 }
6773 }
6774 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6775 {
6776 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
6777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6778 switch (pVCpu->iem.s.enmEffAddrMode)
6779 {
6780 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6781 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6782 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6783 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6784 }
6785 }
6786
6787 /*
6788 * Sharing case implementation with cmps[wdq] below.
6789 */
6790 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
6791 switch (pVCpu->iem.s.enmEffAddrMode)
6792 {
6793 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
6794 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
6795 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
6796 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6797 }
6798}
6799
6800
6801/**
6802 * @opcode 0xa7
6803 */
6804FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
6805{
6806 /*
6807 * Use the C implementation if a repeat prefix is encountered.
6808 */
6809 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6810 {
6811 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
6812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6813 switch (pVCpu->iem.s.enmEffOpSize)
6814 {
6815 case IEMMODE_16BIT:
6816 switch (pVCpu->iem.s.enmEffAddrMode)
6817 {
6818 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6819 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6820 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6821 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6822 }
6823 break;
6824 case IEMMODE_32BIT:
6825 switch (pVCpu->iem.s.enmEffAddrMode)
6826 {
6827 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6828 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6829 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6830 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6831 }
6832 case IEMMODE_64BIT:
6833 switch (pVCpu->iem.s.enmEffAddrMode)
6834 {
6835 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
6836 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6837 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6838 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6839 }
6840 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6841 }
6842 }
6843
6844 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6845 {
6846 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
6847 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6848 switch (pVCpu->iem.s.enmEffOpSize)
6849 {
6850 case IEMMODE_16BIT:
6851 switch (pVCpu->iem.s.enmEffAddrMode)
6852 {
6853 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6854 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6855 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6856 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6857 }
6858 break;
6859 case IEMMODE_32BIT:
6860 switch (pVCpu->iem.s.enmEffAddrMode)
6861 {
6862 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6863 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6864 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6865 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6866 }
6867 case IEMMODE_64BIT:
6868 switch (pVCpu->iem.s.enmEffAddrMode)
6869 {
6870 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
6871 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6872 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6873 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6874 }
6875 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6876 }
6877 }
6878
6879 /*
6880 * Annoying double switch here.
6881 * Using ugly macro for implementing the cases, sharing it with cmpsb.
6882 */
6883 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
6884 switch (pVCpu->iem.s.enmEffOpSize)
6885 {
6886 case IEMMODE_16BIT:
6887 switch (pVCpu->iem.s.enmEffAddrMode)
6888 {
6889 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
6890 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
6891 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
6892 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6893 }
6894 break;
6895
6896 case IEMMODE_32BIT:
6897 switch (pVCpu->iem.s.enmEffAddrMode)
6898 {
6899 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
6900 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
6901 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
6902 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6903 }
6904 break;
6905
6906 case IEMMODE_64BIT:
6907 switch (pVCpu->iem.s.enmEffAddrMode)
6908 {
6909 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6910 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
6911 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
6912 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6913 }
6914 break;
6915 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6916 }
6917}
6918
6919#undef IEM_CMPS_CASE
6920
6921/**
6922 * @opcode 0xa8
6923 */
6924FNIEMOP_DEF(iemOp_test_AL_Ib)
6925{
6926 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
6927 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6928 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
6929}
6930
6931
6932/**
6933 * @opcode 0xa9
6934 */
6935FNIEMOP_DEF(iemOp_test_eAX_Iz)
6936{
6937 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
6938 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6939 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
6940}
6941
6942
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits one non-repeated STOS iteration: stores the low ValBits of rAX at
 * ES:[rDI] (the ES segment cannot be overridden), then advances/retreats
 * rDI according to EFLAGS.DF.
 *
 * @param ValBits  Element width in bits (8/16/32/64).
 * @param AddrBits Effective address width in bits (16/32/64).
 */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR,  uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6959
6960/**
6961 * @opcode 0xaa
6962 */
6963FNIEMOP_DEF(iemOp_stosb_Yb_AL)
6964{
6965 /*
6966 * Use the C implementation if a repeat prefix is encountered.
6967 */
6968 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6969 {
6970 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
6971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6972 switch (pVCpu->iem.s.enmEffAddrMode)
6973 {
6974 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m16);
6975 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m32);
6976 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m64);
6977 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6978 }
6979 }
6980
6981 /*
6982 * Sharing case implementation with stos[wdq] below.
6983 */
6984 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
6985 switch (pVCpu->iem.s.enmEffAddrMode)
6986 {
6987 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
6988 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
6989 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
6990 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6991 }
6992}
6993
6994
6995/**
6996 * @opcode 0xab
6997 */
6998FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
6999{
7000 /*
7001 * Use the C implementation if a repeat prefix is encountered.
7002 */
7003 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7004 {
7005 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
7006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7007 switch (pVCpu->iem.s.enmEffOpSize)
7008 {
7009 case IEMMODE_16BIT:
7010 switch (pVCpu->iem.s.enmEffAddrMode)
7011 {
7012 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m16);
7013 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m32);
7014 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m64);
7015 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7016 }
7017 break;
7018 case IEMMODE_32BIT:
7019 switch (pVCpu->iem.s.enmEffAddrMode)
7020 {
7021 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m16);
7022 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m32);
7023 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m64);
7024 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7025 }
7026 case IEMMODE_64BIT:
7027 switch (pVCpu->iem.s.enmEffAddrMode)
7028 {
7029 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7030 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m32);
7031 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m64);
7032 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7033 }
7034 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7035 }
7036 }
7037
7038 /*
7039 * Annoying double switch here.
7040 * Using ugly macro for implementing the cases, sharing it with stosb.
7041 */
7042 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7043 switch (pVCpu->iem.s.enmEffOpSize)
7044 {
7045 case IEMMODE_16BIT:
7046 switch (pVCpu->iem.s.enmEffAddrMode)
7047 {
7048 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
7049 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
7050 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
7051 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7052 }
7053 break;
7054
7055 case IEMMODE_32BIT:
7056 switch (pVCpu->iem.s.enmEffAddrMode)
7057 {
7058 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
7059 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
7060 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
7061 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7062 }
7063 break;
7064
7065 case IEMMODE_64BIT:
7066 switch (pVCpu->iem.s.enmEffAddrMode)
7067 {
7068 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7069 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
7070 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
7071 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7072 }
7073 break;
7074 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7075 }
7076}
7077
7078#undef IEM_STOS_CASE
7079
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the single-iteration (non-REP) LODS microcode: fetch the source
 * address from [e/r]SI (zero-extended to 64 bits), read a ValBits-wide value
 * from iEffSeg:[e/r]SI (segment override already folded into iEffSeg), store
 * it into AL/AX/EAX/RAX, then step [e/r]SI by ValBits/8 - down when
 * EFLAGS.DF is set, up otherwise.
 *
 * @param   ValBits     Operand width in bits: 8, 16, 32 or 64.
 * @param   AddrBits    Effective address width in bits: 16, 32 or 64.
 *
 * Note! No comments inside the macro body: the trailing backslash
 *       continuations must stay byte-exact for the recompiler scripts. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
7096
/**
 * @opcode 0xac
 *
 * LODS AL,Xb - load a byte from [i]EffSeg:[e/r]SI into AL and step [e/r]SI
 * by one according to EFLAGS.DF.  Both REPZ and REPNZ are treated as REP
 * and defer to the C implementation.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* The effective segment (with any override applied) is passed on to the CIMPL worker. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with lods[wdq] below (IEM_LODS_CASE).
     */
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7130
7131
7132/**
7133 * @opcode 0xad
7134 */
7135FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
7136{
7137 /*
7138 * Use the C implementation if a repeat prefix is encountered.
7139 */
7140 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7141 {
7142 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
7143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7144 switch (pVCpu->iem.s.enmEffOpSize)
7145 {
7146 case IEMMODE_16BIT:
7147 switch (pVCpu->iem.s.enmEffAddrMode)
7148 {
7149 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
7150 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
7151 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
7152 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7153 }
7154 break;
7155 case IEMMODE_32BIT:
7156 switch (pVCpu->iem.s.enmEffAddrMode)
7157 {
7158 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
7159 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
7160 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
7161 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7162 }
7163 case IEMMODE_64BIT:
7164 switch (pVCpu->iem.s.enmEffAddrMode)
7165 {
7166 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
7167 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
7168 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
7169 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7170 }
7171 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7172 }
7173 }
7174
7175 /*
7176 * Annoying double switch here.
7177 * Using ugly macro for implementing the cases, sharing it with lodsb.
7178 */
7179 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
7180 switch (pVCpu->iem.s.enmEffOpSize)
7181 {
7182 case IEMMODE_16BIT:
7183 switch (pVCpu->iem.s.enmEffAddrMode)
7184 {
7185 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
7186 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
7187 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
7188 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7189 }
7190 break;
7191
7192 case IEMMODE_32BIT:
7193 switch (pVCpu->iem.s.enmEffAddrMode)
7194 {
7195 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
7196 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
7197 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
7198 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7199 }
7200 break;
7201
7202 case IEMMODE_64BIT:
7203 switch (pVCpu->iem.s.enmEffAddrMode)
7204 {
7205 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7206 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
7207 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
7208 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7209 }
7210 break;
7211 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7212 }
7213}
7214
7215#undef IEM_LODS_CASE
7216
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the single-iteration (non-REP) SCAS microcode: read a ValBits-wide
 * value from ES:[e/r]DI (SCAS always uses ES, no segment override), compare
 * it against AL/AX/EAX/RAX via iemAImpl_cmp_u##ValBits (updating EFLAGS like
 * CMP, accumulator unchanged), then step [e/r]DI by ValBits/8 - down when
 * EFLAGS.DF is set, up otherwise.
 *
 * @param   ValBits     Operand width in bits: 8, 16, 32 or 64.
 * @param   AddrBits    Effective address width in bits: 16, 32 or 64.
 *
 * Note! No comments inside the macro body: the trailing backslash
 *       continuations must stay byte-exact for the recompiler scripts. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END();
7239
/**
 * @opcode 0xae
 *
 * SCAS AL,Xb - compare AL with the byte at ES:[e/r]DI and step [e/r]DI by
 * one according to EFLAGS.DF.  Unlike STOS/LODS, the two repeat prefixes
 * differ here: REPZ (repe) continues while equal, REPNZ (repne) while not
 * equal; each defers to its own CIMPL worker.
 */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        /* Note: stats identifier 'repone' is a historical typo for 'repne'; kept as-is. */
        IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with scas[wdq] below (IEM_SCAS_CASE).
     */
    IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7285
7286
7287/**
7288 * @opcode 0xaf
7289 */
7290FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
7291{
7292 /*
7293 * Use the C implementation if a repeat prefix is encountered.
7294 */
7295 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7296 {
7297 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
7298 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7299 switch (pVCpu->iem.s.enmEffOpSize)
7300 {
7301 case IEMMODE_16BIT:
7302 switch (pVCpu->iem.s.enmEffAddrMode)
7303 {
7304 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m16);
7305 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m32);
7306 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m64);
7307 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7308 }
7309 break;
7310 case IEMMODE_32BIT:
7311 switch (pVCpu->iem.s.enmEffAddrMode)
7312 {
7313 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m16);
7314 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m32);
7315 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m64);
7316 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7317 }
7318 case IEMMODE_64BIT:
7319 switch (pVCpu->iem.s.enmEffAddrMode)
7320 {
7321 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
7322 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m32);
7323 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m64);
7324 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7325 }
7326 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7327 }
7328 }
7329 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7330 {
7331 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
7332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7333 switch (pVCpu->iem.s.enmEffOpSize)
7334 {
7335 case IEMMODE_16BIT:
7336 switch (pVCpu->iem.s.enmEffAddrMode)
7337 {
7338 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m16);
7339 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m32);
7340 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m64);
7341 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7342 }
7343 break;
7344 case IEMMODE_32BIT:
7345 switch (pVCpu->iem.s.enmEffAddrMode)
7346 {
7347 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m16);
7348 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m32);
7349 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m64);
7350 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7351 }
7352 case IEMMODE_64BIT:
7353 switch (pVCpu->iem.s.enmEffAddrMode)
7354 {
7355 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
7356 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m32);
7357 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m64);
7358 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7359 }
7360 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7361 }
7362 }
7363
7364 /*
7365 * Annoying double switch here.
7366 * Using ugly macro for implementing the cases, sharing it with scasb.
7367 */
7368 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
7369 switch (pVCpu->iem.s.enmEffOpSize)
7370 {
7371 case IEMMODE_16BIT:
7372 switch (pVCpu->iem.s.enmEffAddrMode)
7373 {
7374 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
7375 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
7376 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
7377 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7378 }
7379 break;
7380
7381 case IEMMODE_32BIT:
7382 switch (pVCpu->iem.s.enmEffAddrMode)
7383 {
7384 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
7385 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
7386 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
7387 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7388 }
7389 break;
7390
7391 case IEMMODE_64BIT:
7392 switch (pVCpu->iem.s.enmEffAddrMode)
7393 {
7394 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7395 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
7396 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
7397 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7398 }
7399 break;
7400 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7401 }
7402}
7403
7404#undef IEM_SCAS_CASE
7405
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the imm8 operand and stores it in the given 8-bit GPR.
 *
 * @param   iFixedReg   Destination register index; the caller has already
 *                      OR'ed in uRexB, so values 4-7 mean AH/CH/DH/BH
 *                      without REX and SPL/BPL/SIL/DIL with it (presumably
 *                      resolved inside IEM_MC_STORE_GREG_U8 - verify).
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iFixedReg, u8Value);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7419
7420
/**
 * @opcode 0xb0
 * mov AL,Ib (REX.B selects R8B instead of AL).
 */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
7429
7430
/**
 * @opcode 0xb1
 * mov CL,Ib (REX.B selects R9B instead of CL).
 */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
7439
7440
/**
 * @opcode 0xb2
 * mov DL,Ib (REX.B selects R10B instead of DL).
 */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
7449
7450
/**
 * @opcode 0xb3
 * mov BL,Ib (REX.B selects R11B instead of BL).
 */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
7459
7460
/**
 * @opcode 0xb4
 * mov AH,Ib - register index 4 means AH without a REX prefix and SPL/R12B
 * with one, hence X86_GREG_xSP below.
 */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
7469
7470
/**
 * @opcode 0xb5
 * mov CH,Ib - register index 5 means CH without a REX prefix and BPL/R13B
 * with one, hence X86_GREG_xBP below.
 */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
7479
7480
/**
 * @opcode 0xb6
 * mov DH,Ib - register index 6 means DH without a REX prefix and SIL/R14B
 * with one, hence X86_GREG_xSI below.
 */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
7489
7490
/**
 * @opcode 0xb7
 * mov BH,Ib - register index 7 means BH without a REX prefix and DIL/R15B
 * with one, hence X86_GREG_xDI below.
 */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
7499
7500
/**
 * Common 'mov regX,immX' helper.
 *
 * Fetches an immediate of the effective operand size (a full 64-bit
 * immediate in 64-bit operand size - the only instruction with one) and
 * stores it in the given general register.
 *
 * @param   iFixedReg   Destination register index, uRexB already OR'ed in
 *                      by the caller.
 */
FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
            IEM_MC_STORE_GREG_U16(iFixedReg, u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
            IEM_MC_STORE_GREG_U32(iFixedReg, u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }
        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
            IEM_MC_STORE_GREG_U64(iFixedReg, u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7545
7546
/**
 * @opcode 0xb8
 * mov rAX,Iv (REX.B selects r8; immediate width follows operand size).
 */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
7555
7556
/**
 * @opcode 0xb9
 * mov rCX,Iv (REX.B selects r9; immediate width follows operand size).
 */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
7565
7566
/**
 * @opcode 0xba
 * mov rDX,Iv (REX.B selects r10; immediate width follows operand size).
 */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
7575
7576
/**
 * @opcode 0xbb
 * mov rBX,Iv (REX.B selects r11; immediate width follows operand size).
 */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
7585
7586
/**
 * @opcode 0xbc
 * mov rSP,Iv (REX.B selects r12; immediate width follows operand size).
 */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
7595
7596
/**
 * @opcode 0xbd
 * mov rBP,Iv (REX.B selects r13; immediate width follows operand size).
 */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
7605
7606
/**
 * @opcode 0xbe
 * mov rSI,Iv (REX.B selects r14; immediate width follows operand size).
 */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
7615
7616
/**
 * @opcode 0xbf
 * mov rDI,Iv (REX.B selects r15; immediate width follows operand size).
 */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
7625
7626
/**
 * @opcode 0xc0
 *
 * Group 2 shift/rotate with imm8 count, byte operand: rol/ror/rcl/rcr/
 * shl/shr/sar Eb,Ib selected by ModRM.reg (/6 is undefined and raises \#UD).
 * New in the 80186, hence the MIN_186 check.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are architecturally undefined after these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,   pu8Dst,            0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *,  pEFlags,           2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory: the imm8 count follows the ModRM displacement, so it is
           fetched after IEM_MC_CALC_RM_EFF_ADDR (note the '1' imm-byte hint). */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,         cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7688
7689
/**
 * @opcode 0xc1
 *
 * Group 2 shift/rotate with imm8 count, word/dword/qword operand:
 * rol/ror/rcl/rcr/shl/shr/sar Ev,Ib selected by ModRM.reg (/6 is undefined
 * and raises \#UD).  New in the 80186, hence the MIN_186 check.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are architecturally undefined after these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes must clear bits 63:32 of the full GPR. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: the imm8 count follows the ModRM displacement, so it is
           fetched after IEM_MC_CALC_RM_EFF_ADDR (note the '1' imm-byte hint). */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,         cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,         cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,         cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7836
7837
/**
 * @opcode 0xc2
 *
 * RET Iw - near return, popping Iw extra bytes off the stack.  Defaults to
 * 64-bit operand size in long mode; on Intel the operand size prefix is
 * ignored here (see the helper macro).  Always defers to the CIMPL workers
 * (indirect branch).
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_16, u16Imm);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_32, u16Imm);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_64, u16Imm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7858
7859
/**
 * @opcode 0xc3
 *
 * RET - plain near return.  Defaults to 64-bit operand size in long mode;
 * on Intel the operand size prefix is ignored here (see the helper macro).
 * Always defers to the CIMPL workers (indirect branch).
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_16);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_32);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_64);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7879
7880
/**
 * @opcode 0xc4
 *
 * LES Gv,Mp in 16/32-bit code, or the 3-byte VEX prefix.
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LES instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm) )
    {
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* VEX.W is only honoured in 64-bit code. */
            if ((bVex2 & 0x80 /* VEX.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* The R/X/B bits are stored inverted in the VEX prefix. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;

            /* The low 5 bits of the second byte select the opcode map. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        Log(("VEX3: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
7950
7951
/**
 * @opcode 0xc5
 *
 * LDS Gv,Mp in 16/32-bit code, or the 2-byte VEX prefix.
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0.
               The 2-byte form only encodes map 1 (0x0f); R and vvvv are inverted. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bRm & 0x3;

#ifdef IEM_WITH_VEX
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
7995
7996
7997/**
7998 * @opcode 0xc6
7999 */
8000FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
8001{
8002 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8003 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
8004 IEMOP_RAISE_INVALID_OPCODE_RET();
8005 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
8006
8007 if (IEM_IS_MODRM_REG_MODE(bRm))
8008 {
8009 /* register access */
8010 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8011 IEM_MC_BEGIN(0, 0);
8012 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8013 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
8014 IEM_MC_ADVANCE_RIP_AND_FINISH();
8015 IEM_MC_END();
8016 }
8017 else
8018 {
8019 /* memory access. */
8020 IEM_MC_BEGIN(0, 1);
8021 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8022 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8023 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8025 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
8026 IEM_MC_ADVANCE_RIP_AND_FINISH();
8027 IEM_MC_END();
8028 }
8029}
8030
8031
8032/**
8033 * @opcode 0xc7
8034 */
8035FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
8036{
8037 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8038 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
8039 IEMOP_RAISE_INVALID_OPCODE_RET();
8040 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
8041
8042 if (IEM_IS_MODRM_REG_MODE(bRm))
8043 {
8044 /* register access */
8045 switch (pVCpu->iem.s.enmEffOpSize)
8046 {
8047 case IEMMODE_16BIT:
8048 IEM_MC_BEGIN(0, 0);
8049 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8051 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
8052 IEM_MC_ADVANCE_RIP_AND_FINISH();
8053 IEM_MC_END();
8054 break;
8055
8056 case IEMMODE_32BIT:
8057 IEM_MC_BEGIN(0, 0);
8058 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8060 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
8061 IEM_MC_ADVANCE_RIP_AND_FINISH();
8062 IEM_MC_END();
8063 break;
8064
8065 case IEMMODE_64BIT:
8066 IEM_MC_BEGIN(0, 0);
8067 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8069 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
8070 IEM_MC_ADVANCE_RIP_AND_FINISH();
8071 IEM_MC_END();
8072 break;
8073
8074 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8075 }
8076 }
8077 else
8078 {
8079 /* memory access. */
8080 switch (pVCpu->iem.s.enmEffOpSize)
8081 {
8082 case IEMMODE_16BIT:
8083 IEM_MC_BEGIN(0, 1);
8084 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8085 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
8086 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8088 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
8089 IEM_MC_ADVANCE_RIP_AND_FINISH();
8090 IEM_MC_END();
8091 break;
8092
8093 case IEMMODE_32BIT:
8094 IEM_MC_BEGIN(0, 1);
8095 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8096 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8097 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8099 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
8100 IEM_MC_ADVANCE_RIP_AND_FINISH();
8101 IEM_MC_END();
8102 break;
8103
8104 case IEMMODE_64BIT:
8105 IEM_MC_BEGIN(0, 1);
8106 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8107 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8108 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8110 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
8111 IEM_MC_ADVANCE_RIP_AND_FINISH();
8112 IEM_MC_END();
8113 break;
8114
8115 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8116 }
8117 }
8118}
8119
8120
8121
8122
8123/**
8124 * @opcode 0xc8
8125 */
8126FNIEMOP_DEF(iemOp_enter_Iw_Ib)
8127{
8128 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
8129 IEMOP_HLP_MIN_186();
8130 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8131 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
8132 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
8133 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8134 IEM_MC_DEFER_TO_CIMPL_3_RET(0, iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
8135}
8136
8137
8138/**
8139 * @opcode 0xc9
8140 */
8141FNIEMOP_DEF(iemOp_leave)
8142{
8143 IEMOP_MNEMONIC(leave, "leave");
8144 IEMOP_HLP_MIN_186();
8145 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8146 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8147 IEM_MC_DEFER_TO_CIMPL_1_RET(0, iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
8148}
8149
8150
8151/**
8152 * @opcode 0xca
8153 */
8154FNIEMOP_DEF(iemOp_retf_Iw)
8155{
8156 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
8157 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8159 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE,
8160 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
8161}
8162
8163
8164/**
8165 * @opcode 0xcb
8166 */
8167FNIEMOP_DEF(iemOp_retf)
8168{
8169 IEMOP_MNEMONIC(retf, "retf");
8170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8171 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE,
8172 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
8173}
8174
8175
8176/**
8177 * @opcode 0xcc
8178 */
8179FNIEMOP_DEF(iemOp_int3)
8180{
8181 IEMOP_MNEMONIC(int3, "int3");
8182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8183 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
8184 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
8185 iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
8186}
8187
8188
8189/**
8190 * @opcode 0xcd
8191 */
8192FNIEMOP_DEF(iemOp_int_Ib)
8193{
8194 IEMOP_MNEMONIC(int_Ib, "int Ib");
8195 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
8196 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8197 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
8198 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
8199 iemCImpl_int, u8Int, IEMINT_INTN);
8200}
8201
8202
8203/**
8204 * @opcode 0xce
8205 */
8206FNIEMOP_DEF(iemOp_into)
8207{
8208 IEMOP_MNEMONIC(into, "into");
8209 IEMOP_HLP_NO_64BIT();
8210 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_CONDITIONAL
8211 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
8212 iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
8213}
8214
8215
8216/**
8217 * @opcode 0xcf
8218 */
8219FNIEMOP_DEF(iemOp_iret)
8220{
8221 IEMOP_MNEMONIC(iret, "iret");
8222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8223 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
8224 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
8225 iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
8226}
8227
8228
8229/**
8230 * @opcode 0xd0
8231 */
8232FNIEMOP_DEF(iemOp_Grp2_Eb_1)
8233{
8234 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8235 PCIEMOPSHIFTSIZES pImpl;
8236 switch (IEM_GET_MODRM_REG_8(bRm))
8237 {
8238 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
8239 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
8240 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
8241 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
8242 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
8243 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
8244 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
8245 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8246 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
8247 }
8248 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8249
8250 if (IEM_IS_MODRM_REG_MODE(bRm))
8251 {
8252 /* register */
8253 IEM_MC_BEGIN(3, 0);
8254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8255 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8256 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
8257 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8258 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8259 IEM_MC_REF_EFLAGS(pEFlags);
8260 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8261 IEM_MC_ADVANCE_RIP_AND_FINISH();
8262 IEM_MC_END();
8263 }
8264 else
8265 {
8266 /* memory */
8267 IEM_MC_BEGIN(3, 3);
8268 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8269 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
8270 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8271 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8272 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8273
8274 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8276 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8277 IEM_MC_FETCH_EFLAGS(EFlags);
8278 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8279
8280 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
8281 IEM_MC_COMMIT_EFLAGS(EFlags);
8282 IEM_MC_ADVANCE_RIP_AND_FINISH();
8283 IEM_MC_END();
8284 }
8285}
8286
8287
8288
8289/**
8290 * @opcode 0xd1
8291 */
8292FNIEMOP_DEF(iemOp_Grp2_Ev_1)
8293{
8294 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8295 PCIEMOPSHIFTSIZES pImpl;
8296 switch (IEM_GET_MODRM_REG_8(bRm))
8297 {
8298 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
8299 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
8300 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
8301 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
8302 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
8303 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
8304 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
8305 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8306 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
8307 }
8308 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8309
8310 if (IEM_IS_MODRM_REG_MODE(bRm))
8311 {
8312 /* register */
8313 switch (pVCpu->iem.s.enmEffOpSize)
8314 {
8315 case IEMMODE_16BIT:
8316 IEM_MC_BEGIN(3, 0);
8317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8318 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8319 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8320 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8321 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8322 IEM_MC_REF_EFLAGS(pEFlags);
8323 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8324 IEM_MC_ADVANCE_RIP_AND_FINISH();
8325 IEM_MC_END();
8326 break;
8327
8328 case IEMMODE_32BIT:
8329 IEM_MC_BEGIN(3, 0);
8330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8331 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8332 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8333 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8334 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8335 IEM_MC_REF_EFLAGS(pEFlags);
8336 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8337 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8338 IEM_MC_ADVANCE_RIP_AND_FINISH();
8339 IEM_MC_END();
8340 break;
8341
8342 case IEMMODE_64BIT:
8343 IEM_MC_BEGIN(3, 0);
8344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8345 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8346 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8347 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8348 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8349 IEM_MC_REF_EFLAGS(pEFlags);
8350 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8351 IEM_MC_ADVANCE_RIP_AND_FINISH();
8352 IEM_MC_END();
8353 break;
8354
8355 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8356 }
8357 }
8358 else
8359 {
8360 /* memory */
8361 switch (pVCpu->iem.s.enmEffOpSize)
8362 {
8363 case IEMMODE_16BIT:
8364 IEM_MC_BEGIN(3, 2);
8365 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8366 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8367 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8368 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8369
8370 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8372 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8373 IEM_MC_FETCH_EFLAGS(EFlags);
8374 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8375
8376 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8377 IEM_MC_COMMIT_EFLAGS(EFlags);
8378 IEM_MC_ADVANCE_RIP_AND_FINISH();
8379 IEM_MC_END();
8380 break;
8381
8382 case IEMMODE_32BIT:
8383 IEM_MC_BEGIN(3, 2);
8384 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8385 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8386 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8387 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8388
8389 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8390 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8391 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8392 IEM_MC_FETCH_EFLAGS(EFlags);
8393 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8394
8395 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8396 IEM_MC_COMMIT_EFLAGS(EFlags);
8397 IEM_MC_ADVANCE_RIP_AND_FINISH();
8398 IEM_MC_END();
8399 break;
8400
8401 case IEMMODE_64BIT:
8402 IEM_MC_BEGIN(3, 2);
8403 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8404 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8405 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8406 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8407
8408 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8409 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8410 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8411 IEM_MC_FETCH_EFLAGS(EFlags);
8412 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8413
8414 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8415 IEM_MC_COMMIT_EFLAGS(EFlags);
8416 IEM_MC_ADVANCE_RIP_AND_FINISH();
8417 IEM_MC_END();
8418 break;
8419
8420 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8421 }
8422 }
8423}
8424
8425
8426/**
8427 * @opcode 0xd2
8428 */
8429FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
8430{
8431 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8432 PCIEMOPSHIFTSIZES pImpl;
8433 switch (IEM_GET_MODRM_REG_8(bRm))
8434 {
8435 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
8436 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
8437 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
8438 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
8439 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
8440 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
8441 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
8442 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8443 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
8444 }
8445 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8446
8447 if (IEM_IS_MODRM_REG_MODE(bRm))
8448 {
8449 /* register */
8450 IEM_MC_BEGIN(3, 0);
8451 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8452 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8453 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8454 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8455 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8456 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8457 IEM_MC_REF_EFLAGS(pEFlags);
8458 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8459 IEM_MC_ADVANCE_RIP_AND_FINISH();
8460 IEM_MC_END();
8461 }
8462 else
8463 {
8464 /* memory */
8465 IEM_MC_BEGIN(3, 3);
8466 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8467 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8468 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8469 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8470 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8471
8472 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8474 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8475 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8476 IEM_MC_FETCH_EFLAGS(EFlags);
8477 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8478
8479 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
8480 IEM_MC_COMMIT_EFLAGS(EFlags);
8481 IEM_MC_ADVANCE_RIP_AND_FINISH();
8482 IEM_MC_END();
8483 }
8484}
8485
8486
8487/**
8488 * @opcode 0xd3
8489 */
8490FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
8491{
8492 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8493 PCIEMOPSHIFTSIZES pImpl;
8494 switch (IEM_GET_MODRM_REG_8(bRm))
8495 {
8496 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
8497 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
8498 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
8499 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
8500 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
8501 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
8502 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
8503 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8504 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8505 }
8506 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8507
8508 if (IEM_IS_MODRM_REG_MODE(bRm))
8509 {
8510 /* register */
8511 switch (pVCpu->iem.s.enmEffOpSize)
8512 {
8513 case IEMMODE_16BIT:
8514 IEM_MC_BEGIN(3, 0);
8515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8516 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8517 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8518 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8519 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8520 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8521 IEM_MC_REF_EFLAGS(pEFlags);
8522 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8523 IEM_MC_ADVANCE_RIP_AND_FINISH();
8524 IEM_MC_END();
8525 break;
8526
8527 case IEMMODE_32BIT:
8528 IEM_MC_BEGIN(3, 0);
8529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8530 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8531 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8532 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8533 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8534 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8535 IEM_MC_REF_EFLAGS(pEFlags);
8536 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8537 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8538 IEM_MC_ADVANCE_RIP_AND_FINISH();
8539 IEM_MC_END();
8540 break;
8541
8542 case IEMMODE_64BIT:
8543 IEM_MC_BEGIN(3, 0);
8544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8545 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8546 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8547 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8548 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8549 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8550 IEM_MC_REF_EFLAGS(pEFlags);
8551 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8552 IEM_MC_ADVANCE_RIP_AND_FINISH();
8553 IEM_MC_END();
8554 break;
8555
8556 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8557 }
8558 }
8559 else
8560 {
8561 /* memory */
8562 switch (pVCpu->iem.s.enmEffOpSize)
8563 {
8564 case IEMMODE_16BIT:
8565 IEM_MC_BEGIN(3, 2);
8566 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8567 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8568 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8569 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8570
8571 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8573 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8574 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8575 IEM_MC_FETCH_EFLAGS(EFlags);
8576 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8577
8578 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8579 IEM_MC_COMMIT_EFLAGS(EFlags);
8580 IEM_MC_ADVANCE_RIP_AND_FINISH();
8581 IEM_MC_END();
8582 break;
8583
8584 case IEMMODE_32BIT:
8585 IEM_MC_BEGIN(3, 2);
8586 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8587 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8588 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8589 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8590
8591 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8593 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8594 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8595 IEM_MC_FETCH_EFLAGS(EFlags);
8596 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8597
8598 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8599 IEM_MC_COMMIT_EFLAGS(EFlags);
8600 IEM_MC_ADVANCE_RIP_AND_FINISH();
8601 IEM_MC_END();
8602 break;
8603
8604 case IEMMODE_64BIT:
8605 IEM_MC_BEGIN(3, 2);
8606 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8607 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8608 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8609 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8610
8611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8613 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8614 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8615 IEM_MC_FETCH_EFLAGS(EFlags);
8616 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8617
8618 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8619 IEM_MC_COMMIT_EFLAGS(EFlags);
8620 IEM_MC_ADVANCE_RIP_AND_FINISH();
8621 IEM_MC_END();
8622 break;
8623
8624 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8625 }
8626 }
8627}
8628
8629/**
8630 * @opcode 0xd4
8631 */
8632FNIEMOP_DEF(iemOp_aam_Ib)
8633{
8634 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
8635 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
8636 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8637 IEMOP_HLP_NO_64BIT();
8638 if (!bImm)
8639 IEMOP_RAISE_DIVIDE_ERROR_RET();
8640 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aam, bImm);
8641}
8642
8643
8644/**
8645 * @opcode 0xd5
8646 */
8647FNIEMOP_DEF(iemOp_aad_Ib)
8648{
8649 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
8650 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
8651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8652 IEMOP_HLP_NO_64BIT();
8653 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aad, bImm);
8654}
8655
8656
8657/**
8658 * @opcode 0xd6
8659 */
8660FNIEMOP_DEF(iemOp_salc)
8661{
8662 IEMOP_MNEMONIC(salc, "salc");
8663 IEMOP_HLP_NO_64BIT();
8664
8665 IEM_MC_BEGIN(0, 0);
8666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8667 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8668 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
8669 } IEM_MC_ELSE() {
8670 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
8671 } IEM_MC_ENDIF();
8672 IEM_MC_ADVANCE_RIP_AND_FINISH();
8673 IEM_MC_END();
8674}
8675
8676
8677/**
8678 * @opcode 0xd7
8679 */
8680FNIEMOP_DEF(iemOp_xlat)
8681{
8682 IEMOP_MNEMONIC(xlat, "xlat");
8683 switch (pVCpu->iem.s.enmEffAddrMode)
8684 {
8685 case IEMMODE_16BIT:
8686 IEM_MC_BEGIN(2, 0);
8687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8688 IEM_MC_LOCAL(uint8_t, u8Tmp);
8689 IEM_MC_LOCAL(uint16_t, u16Addr);
8690 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
8691 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
8692 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
8693 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
8694 IEM_MC_ADVANCE_RIP_AND_FINISH();
8695 IEM_MC_END();
8696 break;
8697
8698 case IEMMODE_32BIT:
8699 IEM_MC_BEGIN(2, 0);
8700 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8701 IEM_MC_LOCAL(uint8_t, u8Tmp);
8702 IEM_MC_LOCAL(uint32_t, u32Addr);
8703 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
8704 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
8705 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
8706 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
8707 IEM_MC_ADVANCE_RIP_AND_FINISH();
8708 IEM_MC_END();
8709 break;
8710
8711 case IEMMODE_64BIT:
8712 IEM_MC_BEGIN(2, 0);
8713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8714 IEM_MC_LOCAL(uint8_t, u8Tmp);
8715 IEM_MC_LOCAL(uint64_t, u64Addr);
8716 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
8717 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
8718 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
8719 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
8720 IEM_MC_ADVANCE_RIP_AND_FINISH();
8721 IEM_MC_END();
8722 break;
8723
8724 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8725 }
8726}
8727
8728
8729/**
8730 * Common worker for FPU instructions working on ST0 and STn, and storing the
8731 * result in ST0.
8732 *
8733 * @param bRm Mod R/M byte.
8734 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8735 */
8736FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
8737{
8738 IEM_MC_BEGIN(3, 1);
8739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8740 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8741 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8742 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8743 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8744
8745 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8746 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8747 IEM_MC_PREPARE_FPU_USAGE();
8748 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8749 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
8750 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
8751 } IEM_MC_ELSE() {
8752 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
8753 } IEM_MC_ENDIF();
8754 IEM_MC_ADVANCE_RIP_AND_FINISH();
8755
8756 IEM_MC_END();
8757}
8758
8759
8760/**
8761 * Common worker for FPU instructions working on ST0 and STn, and only affecting
8762 * flags.
8763 *
8764 * @param bRm Mod R/M byte.
8765 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8766 */
8767FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8768{
8769 IEM_MC_BEGIN(3, 1);
8770 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8771 IEM_MC_LOCAL(uint16_t, u16Fsw);
8772 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8773 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8774 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8775
8776 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8777 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8778 IEM_MC_PREPARE_FPU_USAGE();
8779 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8780 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8781 IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
8782 } IEM_MC_ELSE() {
8783 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
8784 } IEM_MC_ENDIF();
8785 IEM_MC_ADVANCE_RIP_AND_FINISH();
8786
8787 IEM_MC_END();
8788}
8789
8790
8791/**
8792 * Common worker for FPU instructions working on ST0 and STn, only affecting
8793 * flags, and popping when done.
8794 *
8795 * @param bRm Mod R/M byte.
8796 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8797 */
8798FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8799{
8800 IEM_MC_BEGIN(3, 1);
8801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8802 IEM_MC_LOCAL(uint16_t, u16Fsw);
8803 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8804 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8805 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8806
8807 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8808 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8809 IEM_MC_PREPARE_FPU_USAGE();
8810 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8811 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8812 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
8813 } IEM_MC_ELSE() {
8814 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
8815 } IEM_MC_ENDIF();
8816 IEM_MC_ADVANCE_RIP_AND_FINISH();
8817
8818 IEM_MC_END();
8819}
8820
8821
/** Opcode 0xd8 11/0.
 *  fadd st0,stN - via the common ST0/STn worker storing into ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
8828
8829
/** Opcode 0xd8 11/1.
 *  fmul st0,stN - via the common ST0/StN worker storing into ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
8836
8837
/** Opcode 0xd8 11/2.
 *  fcom st0,stN - flags-only compare, no result stored. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
8844
8845
/** Opcode 0xd8 11/3.
 *  fcomp st0,stN - same compare worker as fcom, but pops afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
8852
8853
/** Opcode 0xd8 11/4.
 *  fsub st0,stN - via the common ST0/StN worker storing into ST0. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
8860
8861
/** Opcode 0xd8 11/5.
 *  fsubr st0,stN - reversed-operand subtract, result into ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
8868
8869
/** Opcode 0xd8 11/6.
 *  fdiv st0,stN - via the common ST0/StN worker storing into ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
8876
8877
/** Opcode 0xd8 11/7.
 *  fdivr st0,stN - reversed-operand divide, result into ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
8884
8885
8886/**
8887 * Common worker for FPU instructions working on ST0 and an m32r, and storing
8888 * the result in ST0.
8889 *
8890 * @param bRm Mod R/M byte.
8891 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8892 */
8893FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
8894{
8895 IEM_MC_BEGIN(3, 3);
8896 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8897 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8898 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
8899 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8900 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8901 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
8902
8903 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8904 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8905
8906 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8907 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8908 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8909
8910 IEM_MC_PREPARE_FPU_USAGE();
8911 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
8912 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
8913 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
8914 } IEM_MC_ELSE() {
8915 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
8916 } IEM_MC_ENDIF();
8917 IEM_MC_ADVANCE_RIP_AND_FINISH();
8918
8919 IEM_MC_END();
8920}
8921
8922
/** Opcode 0xd8 !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    /* Defer to the common ST0-op-m32real worker with the add helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
8929
8930
/** Opcode 0xd8 !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    /* Defer to the common ST0-op-m32real worker with the multiply helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
8937
8938
/** Opcode 0xd8 !11/2.
 * Compares ST0 with an m32real operand; only FSW is updated, nothing stored. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    /* Decode the effective address; a LOCK prefix is invalid here. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the memory operand before taking ownership of the FPU state. */
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* Record FSW together with the data pointer (FPUDP) for the memory operand. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8970
8971
/** Opcode 0xd8 !11/3.
 * Like FCOM m32real (see iemOp_fcom_m32r) but pops the register stack. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    /* Decode the effective address; a LOCK prefix is invalid here. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the memory operand before taking ownership of the FPU state. */
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* _THEN_POP variants update FSW/FPUDP and then pop the stack. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9003
9004
/** Opcode 0xd8 !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    /* Defer to the common ST0-op-m32real worker with the subtract helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
9011
9012
/** Opcode 0xd8 !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    /* Reversed subtract - operand order is swapped inside the assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
9019
9020
/** Opcode 0xd8 !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    /* Defer to the common ST0-op-m32real worker with the divide helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
9027
9028
/** Opcode 0xd8 !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    /* Reversed divide - operand order is swapped inside the assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
9035
9036
9037/**
9038 * @opcode 0xd8
9039 */
9040FNIEMOP_DEF(iemOp_EscF0)
9041{
9042 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9043 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
9044
9045 if (IEM_IS_MODRM_REG_MODE(bRm))
9046 {
9047 switch (IEM_GET_MODRM_REG_8(bRm))
9048 {
9049 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
9050 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
9051 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
9052 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
9053 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
9054 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
9055 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
9056 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
9057 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9058 }
9059 }
9060 else
9061 {
9062 switch (IEM_GET_MODRM_REG_8(bRm))
9063 {
9064 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
9065 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
9066 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
9067 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
9068 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
9069 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
9070 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
9071 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
9072 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9073 }
9074 }
9075}
9076
9077
/** Opcode 0xd9 /0 mem32real
 * Loads an m32real, converts it to 80-bit format, and pushes it onto the
 * FPU register stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    /* Decode the effective address; a LOCK prefix is invalid here. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires the register that will become the new top (relative reg 7) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Target register occupied: FPU stack overflow. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9108
9109
/** Opcode 0xd9 !11/2 mem32real
 * Stores ST0 to an m32real memory location without popping. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so faults are taken before FPU state changes. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on the FSW the store helper produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: with IM masked, store the negative QNaN indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9143
9144
/** Opcode 0xd9 !11/3
 * Like FST m32real (see iemOp_fst_m32r) but pops the register stack. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so faults are taken before FPU state changes. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on the FSW the store helper produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: with IM masked, store the negative QNaN indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9178
9179
/** Opcode 0xd9 !11/4
 * Loads the FPU environment (14 or 28 bytes depending on operand size) via
 * the C implementation helper. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    /* The heavy lifting (format parsing, exception state) lives in the CIMPL worker. */
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
9196
9197
9198/** Opcode 0xd9 !11/5 */
9199FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
9200{
9201 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
9202 IEM_MC_BEGIN(1, 1);
9203 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9204 IEM_MC_ARG(uint16_t, u16Fsw, 0);
9205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9207 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9208 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9209 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9210 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, iemCImpl_fldcw, u16Fsw);
9211 IEM_MC_END();
9212}
9213
9214
/** Opcode 0xd9 !11/6
 * Stores the FPU environment (14 or 28 bytes depending on operand size)
 * without checking for pending exceptions (the "no-wait" FNSTENV form). */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    /* The environment formatting is done by the CIMPL worker. */
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
9231
9232
/** Opcode 0xd9 !11/7
 * Stores the FPU control word to a 16-bit memory operand (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9249
9250
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP: does nothing except update the FPU opcode/instruction pointer. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     * intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9266
9267
/** Opcode 0xd9 11/0 stN
 * FLD ST(i): pushes a copy of ST(i) onto the register stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Source register must be non-empty; push its value (FSW bits cleared). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9292
9293
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i): exchanges ST0 with ST(i). */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(2, 3);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both registers populated: swap the two values (C1 set via FSW in the result). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Underflow handling is complex enough to live in a CIMPL worker. */
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9322
9323
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i): copies ST0 to ST(i) and pops the register stack. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        /* Destination is ST0 itself: no copy needed, just pop (or underflow). */
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* General case: store ST0 into ST(i), then pop. */
        IEM_MC_BEGIN(0, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9370
9371
9372/**
9373 * Common worker for FPU instructions working on ST0 and replaces it with the
9374 * result, i.e. unary operators.
9375 *
9376 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9377 */
9378FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
9379{
9380 IEM_MC_BEGIN(2, 1);
9381 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9382 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9383 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9384 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
9385
9386 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9387 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9388 IEM_MC_PREPARE_FPU_USAGE();
9389 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
9390 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
9391 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9392 } IEM_MC_ELSE() {
9393 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9394 } IEM_MC_ENDIF();
9395 IEM_MC_ADVANCE_RIP_AND_FINISH();
9396
9397 IEM_MC_END();
9398}
9399
9400
/** Opcode 0xd9 0xe0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    /* Unary change-sign on ST0 via the common unary worker. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
9407
9408
/** Opcode 0xd9 0xe1. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    /* Unary absolute value on ST0 via the common unary worker. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
9415
9416
/** Opcode 0xd9 0xe4.
 * FTST: compares ST0 against 0.0, updating only the FSW condition bits. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9440
9441
/** Opcode 0xd9 0xe5.
 * FXAM: classifies ST0 into the FSW condition bits.  Note that unlike most
 * x87 ops it is called even for an empty ST0 (the helper reports "empty"). */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* No emptiness check here - FXAM must examine empty registers too. */
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9462
9463
9464/**
9465 * Common worker for FPU instructions pushing a constant onto the FPU stack.
9466 *
9467 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9468 */
9469FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
9470{
9471 IEM_MC_BEGIN(1, 1);
9472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9473 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9474 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9475
9476 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9477 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9478 IEM_MC_PREPARE_FPU_USAGE();
9479 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
9480 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
9481 IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
9482 } IEM_MC_ELSE() {
9483 IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
9484 } IEM_MC_ENDIF();
9485 IEM_MC_ADVANCE_RIP_AND_FINISH();
9486
9487 IEM_MC_END();
9488}
9489
9490
/** Opcode 0xd9 0xe8. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    /* Push the constant +1.0 via the common constant-push worker. */
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
9497
9498
/** Opcode 0xd9 0xe9. */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    /* Push the constant log2(10) via the common constant-push worker. */
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
9505
9506
/** Opcode 0xd9 0xea. */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    /* Push the constant log2(e) via the common constant-push worker. */
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
9513
/** Opcode 0xd9 0xeb. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    /* Push the constant pi via the common constant-push worker. */
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
9520
9521
/** Opcode 0xd9 0xec. */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    /* Push the constant log10(2) via the common constant-push worker. */
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
9528
/** Opcode 0xd9 0xed. */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    /* Push the constant ln(2) via the common constant-push worker. */
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
9535
9536
/** Opcode 0xd9 0xee. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    /* Push the constant +0.0 via the common constant-push worker. */
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
9543
9544
/** Opcode 0xd9 0xf0.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    /* Unary 2^x-1 on ST0 via the common unary worker. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
9558
9559
9560/**
9561 * Common worker for FPU instructions working on STn and ST0, storing the result
9562 * in STn, and popping the stack unless IE, DE or ZE was raised.
9563 *
9564 * @param bRm Mod R/M byte.
9565 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9566 */
9567FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
9568{
9569 IEM_MC_BEGIN(3, 1);
9570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9571 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9572 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9573 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9574 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9575
9576 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9577 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9578
9579 IEM_MC_PREPARE_FPU_USAGE();
9580 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
9581 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
9582 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
9583 } IEM_MC_ELSE() {
9584 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
9585 } IEM_MC_ENDIF();
9586 IEM_MC_ADVANCE_RIP_AND_FINISH();
9587
9588 IEM_MC_END();
9589}
9590
9591
/** Opcode 0xd9 0xf1. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    /* Result goes to ST1 (hardcoded reg index 1), stack is popped. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
9598
9599
9600/**
9601 * Common worker for FPU instructions working on ST0 and having two outputs, one
9602 * replacing ST0 and one pushed onto the stack.
9603 *
9604 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9605 */
9606FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
9607{
9608 IEM_MC_BEGIN(2, 1);
9609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9610 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
9611 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
9612 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
9613
9614 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9615 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9616 IEM_MC_PREPARE_FPU_USAGE();
9617 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
9618 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
9619 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
9620 } IEM_MC_ELSE() {
9621 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
9622 } IEM_MC_ENDIF();
9623 IEM_MC_ADVANCE_RIP_AND_FINISH();
9624
9625 IEM_MC_END();
9626}
9627
9628
/** Opcode 0xd9 0xf2. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    /* Two outputs: tangent replaces ST0, then 1.0 is pushed. */
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
9635
9636
/** Opcode 0xd9 0xf3. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    /* Result goes to ST1, stack is popped. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
9643
9644
/** Opcode 0xd9 0xf4. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    /* Two outputs: exponent replaces ST0, then the significand is pushed. */
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
9651
9652
/** Opcode 0xd9 0xf5. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    /* IEEE-style partial remainder; result replaces ST0, no pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
9659
9660
/** Opcode 0xd9 0xf6.
 * FDECSTP: decrements the FPU stack top pointer (TOP in FSW), no data moved. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9681
9682
/** Opcode 0xd9 0xf7.
 * FINCSTP: increments the FPU stack top pointer (TOP in FSW), no data moved. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9703
9704
/** Opcode 0xd9 0xf8. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    /* Legacy (truncating) partial remainder; result replaces ST0, no pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
9711
9712
/** Opcode 0xd9 0xf9. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    /* Result goes to ST1, stack is popped. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
9719
9720
/** Opcode 0xd9 0xfa. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    /* Unary square root on ST0 via the common unary worker. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
9727
9728
/** Opcode 0xd9 0xfb. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    /* Two outputs: sine replaces ST0, then the cosine is pushed. */
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
9735
9736
/** Opcode 0xd9 0xfc. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    /* Unary round-to-integer on ST0 via the common unary worker. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
9743
9744
/** Opcode 0xd9 0xfd. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    /* Scales ST0 by ST1; result replaces ST0, no pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
9751
9752
/** Opcode 0xd9 0xfe. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    /* Unary sine on ST0 via the common unary worker. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
9759
9760
/** Opcode 0xd9 0xff. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    /* Unary cosine on ST0 via the common unary worker. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
9767
9768
/** Used by iemOp_EscF1.
 * Dispatch table for 0xd9 with mod == 3 and reg 4..7; indexed by bRm - 0xe0. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
9805
9806
9807/**
9808 * @opcode 0xd9
9809 */
9810FNIEMOP_DEF(iemOp_EscF1)
9811{
9812 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9813 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
9814
9815 if (IEM_IS_MODRM_REG_MODE(bRm))
9816 {
9817 switch (IEM_GET_MODRM_REG_8(bRm))
9818 {
9819 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
9820 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
9821 case 2:
9822 if (bRm == 0xd0)
9823 return FNIEMOP_CALL(iemOp_fnop);
9824 IEMOP_RAISE_INVALID_OPCODE_RET();
9825 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
9826 case 4:
9827 case 5:
9828 case 6:
9829 case 7:
9830 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
9831 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
9832 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9833 }
9834 }
9835 else
9836 {
9837 switch (IEM_GET_MODRM_REG_8(bRm))
9838 {
9839 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
9840 case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
9841 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
9842 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
9843 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
9844 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
9845 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
9846 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
9847 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9848 }
9849 }
9850}
9851
9852
/** Opcode 0xda 11/0.
 * FCMOVB ST(0),ST(i) - copy ST(i) to ST(0) when CF is set ("below"). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty; otherwise signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9877
9878
/** Opcode 0xda 11/1.
 * FCMOVE ST(0),ST(i) - copy ST(i) to ST(0) when ZF is set ("equal"). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty; otherwise signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9903
9904
/** Opcode 0xda 11/2.
 * FCMOVBE ST(0),ST(i) - copy ST(i) to ST(0) when CF or ZF is set
 * ("below or equal"). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty; otherwise signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9929
9930
/** Opcode 0xda 11/3.
 * FCMOVU ST(0),ST(i) - copy ST(i) to ST(0) when PF is set ("unordered"). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty; otherwise signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9955
9956
/**
 * Common worker for FPU instructions working on ST0 and ST1, only affecting
 * flags, and popping twice when done.
 *
 * The assembly worker only produces a new FSW value (no result register);
 * on success the FSW is updated and the stack popped twice, otherwise
 * stack underflow is signalled (also popping twice).
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(1) must be non-empty. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9986
9987
/** Opcode 0xda 0xe9.
 * FUCOMPP - unordered compare of ST(0) with ST(1), setting only the FPU
 * condition flags, then popping both registers. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
9994
9995
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * Fetches the 32-bit signed integer operand from memory, calls the assembly
 * worker with ST(0) and the integer, and stores the result back into ST(0).
 * Signals stack underflow if ST(0) is empty.
 *
 * @param bRm Mod R/M byte.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10031
10032
/** Opcode 0xda !11/0.
 * FIADD m32i - add a 32-bit signed integer memory operand to ST(0). */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
10039
10040
/** Opcode 0xda !11/1.
 * FIMUL m32i - multiply ST(0) by a 32-bit signed integer memory operand. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
10047
10048
/** Opcode 0xda !11/2.
 * FICOM m32i - compare ST(0) with a 32-bit signed integer memory operand,
 * updating only the FPU status word. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(0) must be non-empty; otherwise signal stack underflow (no store). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10080
10081
/** Opcode 0xda !11/3.
 * FICOMP m32i - like FICOM m32i but also pops the register stack. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Same comparison worker as FICOM; only the FSW update variant pops. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10113
10114
/** Opcode 0xda !11/4.
 * FISUB m32i - subtract a 32-bit signed integer memory operand from ST(0). */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
10121
10122
/** Opcode 0xda !11/5.
 * FISUBR m32i - reverse subtract: 32-bit signed integer operand minus ST(0),
 * result in ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
10129
10130
/** Opcode 0xda !11/6.
 * FIDIV m32i - divide ST(0) by a 32-bit signed integer memory operand. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
10137
10138
/** Opcode 0xda !11/7.
 * FIDIVR m32i - reverse divide: 32-bit signed integer operand divided by
 * ST(0), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
10145
10146
/**
 * @opcode 0xda
 *
 * Escape opcode 0xda decoder.  Register forms are FCMOVB/E/BE/U and FUCOMPP
 * (0xe9 only); memory forms are the m32i integer arithmetic/compare group.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP): ModR/M byte + low 3 bits of the escape byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5:
                /* Only 0xe9 (FUCOMPP) is defined in this group. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10188
10189
/** Opcode 0xdb !11/0.
 * FILD m32i - convert a 32-bit signed integer memory operand to R80 and push
 * it onto the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target (current ST(7)) must be empty, else stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10220
10221
/** Opcode 0xdb !11/1.
 * FISTTP m32i - store ST(0) to a 32-bit signed integer with truncation
 * (iemAImpl_fistt_r80_to_i32) and pop the register stack. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state.
       NOTE(review): still the legacy IEM_MC_MEM_MAP form; conversion to the
       newer IEM_MC_MEM_MAP_XXX style was in progress at this revision. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): if invalid-operation is masked, write the integer
           indefinite value; then signal stack underflow and pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10255
10256
/** Opcode 0xdb !11/2.
 * FIST m32i - store ST(0) to a 32-bit signed integer (rounded per the
 * current FCW settings - TODO confirm against iemAImpl_fist_r80_to_i32),
 * leaving the register stack unchanged. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): if invalid-operation is masked, write the integer
           indefinite value; then signal stack underflow (no pop). */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10290
10291
/** Opcode 0xdb !11/3.
 * FISTP m32i - like FIST m32i (same iemAImpl_fist_r80_to_i32 worker) but
 * also pops the register stack. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): if invalid-operation is masked, write the integer
           indefinite value; then signal stack underflow and pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10325
10326
/** Opcode 0xdb !11/5.
 * FLD m80r - push an 80-bit real memory operand onto the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target (current ST(7)) must be empty, else stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10357
10358
/** Opcode 0xdb !11/7.
 * FSTP m80r - store ST(0) to an 80-bit real memory operand and pop the
 * register stack. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the 10-byte destination for writing before touching the FPU state;
       the _EX variant supplies an explicit alignment spec for the R80 size. */
    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): if invalid-operation is masked, write negative QNaN;
           then signal stack underflow and pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10392
10393
/** Opcode 0xdb 11/0.
 * FCMOVNB ST(0),ST(i) - copy ST(i) to ST(0) when CF is clear ("not below"). */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty; otherwise signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10418
10419
/** Opcode 0xdb 11/1.
 * FCMOVNE ST(0),ST(i) - copy ST(i) to ST(0) when ZF is clear ("not equal"). */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty; otherwise signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10444
10445
/** Opcode 0xdb 11/2.
 * FCMOVNBE ST(0),ST(i) - copy ST(i) to ST(0) when both CF and ZF are clear
 * ("not below or equal"). */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty; otherwise signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10470
10471
/** Opcode 0xdb 11/3.
 * FCMOVNU ST(0),ST(i) - copy ST(i) to ST(0) when PF is clear ("not
 * unordered").
 * NOTE(review): the identifier/mnemonic text "fcmovnnu" looks like a typo
 * for "fcmovnu" (Intel DB /3 = FCMOVNU) - confirm before renaming, since
 * the function name is referenced by the 0xdb dispatcher and the mnemonic
 * token feeds the statistics macros. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty; otherwise signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10496
10497
/** Opcode 0xdb 0xe0.
 * FNENI - 8087-only interrupt enable; emulated as a no-op (only the #NM
 * device-not-available check is performed). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10508
10509
/** Opcode 0xdb 0xe1.
 * FNDISI - 8087-only interrupt disable; emulated as a no-op (only the #NM
 * device-not-available check is performed). */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10520
10521
/** Opcode 0xdb 0xe2.
 * FNCLEX - clear the FPU exception bits in the status word without checking
 * for pending exceptions first. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10534
10535
/** Opcode 0xdb 0xe3.
 * FNINIT - reinitialize the FPU; deferred to the C implementation with
 * fCheckXcpts=false (the no-wait form does not raise pending exceptions). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, iemCImpl_finit, false /*fCheckXcpts*/);
}
10543
10544
/** Opcode 0xdb 0xe4.
 * FNSETPM - 80287-only "set protected mode"; emulated as a no-op (only the
 * #NM device-not-available check is performed). */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10555
10556
/** Opcode 0xdb 0xe5.
 * FRSTPM - 80287XL-only "reset protected mode"; the no-op emulation is
 * disabled and the instruction raises \#UD, matching newer CPUs. */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
10572
10573
/** Opcode 0xdb 11/5.
 * FUCOMI ST(0),ST(i) - unordered compare setting EFLAGS; deferred to the
 * shared fcomi/fucomi C implementation without popping (fPop=0, OR'ed with
 * the FPU opcode into the combined argument). */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fucomi_r80_by_r80,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
10582
10583
10584/** Opcode 0xdb 11/6. */
10585FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
10586{
10587 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
10588 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
10589 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
10590 false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
10591}
10592
10593
/**
 * @opcode 0xdb
 *
 * Escape opcode 0xdb decoder.  Register forms are FCMOVNB/NE/NBE/NU, the
 * administrative group at 0xe0..0xe7 (FNENI, FNDISI, FNCLEX, FNINIT,
 * FNSETPM, FRSTPM) and FUCOMI/FCOMI; memory forms are the m32i integer
 * load/store group plus FLD/FSTP m80r.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP): ModR/M byte + low 3 bits of the escape byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* Administrative group; every sub-case returns. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
                    case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10645
10646
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * Calls the assembly worker with ST(i) and ST(0) and stores the result back
 * into ST(i).  Signals stack underflow on ST(i) if either register is empty.
 *
 * @param bRm Mod R/M byte.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10677
10678
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST(0) - add ST(0) to ST(i), result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
10685
10686
/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST(0) - multiply ST(i) by ST(0), result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
10693
10694
/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST(0) - reverse subtract, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
10701
10702
/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST(0) - subtract, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
10709
10710
/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST(0) - reverse divide, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
10717
10718
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST(0) - divide, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
10725
10726
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * Fetches the R64 operand, calls the assembly worker with ST(0) and the
 * value, and stores the result back into ST(0).  Signals stack underflow if
 * ST(0) is empty.
 *
 * @param bRm Mod R/M byte.
 * @param pfnImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10761
10762
/** Opcode 0xdc !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    /* FADD m64real: ST(0) = ST(0) + [mem64]. */
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
10769
10770
/** Opcode 0xdc !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    /* FMUL m64real: ST(0) = ST(0) * [mem64]. */
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
10777
10778
/** Opcode 0xdc !11/2.
 * FCOM m64real: compare ST(0) with a 64-bit memory operand, setting only the
 * FSW condition codes (no register is written, nothing is popped). */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: record underflow; UINT8_MAX = no destination register. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10810
10811
/** Opcode 0xdc !11/3.
 * FCOMP m64real: same as FCOM m64real but pops ST(0) afterwards (note the
 * _THEN_POP FSW update / underflow variants below). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10843
10844
/** Opcode 0xdc !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    /* FSUB m64real: ST(0) = ST(0) - [mem64]. */
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
10851
10852
/** Opcode 0xdc !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    /* FSUBR m64real: ST(0) = [mem64] - ST(0) (reversed operands). */
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
10859
10860
/** Opcode 0xdc !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    /* FDIV m64real: ST(0) = ST(0) / [mem64]. */
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
10867
10868
/** Opcode 0xdc !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    /* FDIVR m64real: ST(0) = [mem64] / ST(0) (reversed operands). */
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
10875
10876
/**
 * @opcode 0xdc
 *
 * FPU escape 0xdc: dispatches on the ModR/M byte.  Register forms (mod=11)
 * operate ST(i) <- op(ST(i), ST(0)); memory forms use a 64-bit real operand.
 */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the x87 FOP value (low 3 opcode bits + ModR/M) for FSW/FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10915
10916
/** Opcode 0xdd !11/0.
 * FLD m64real: converts a 64-bit real from memory to 80-bit and pushes it.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST(7) (the register becoming the new top) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10947
10948
/** Opcode 0xdd !11/1. (Was mislabelled !11/0; see the iemOp_EscF5 dispatch.)
 * FISTTP m64int: store ST(0) to memory as int64 with truncation, then pop.
 * NOTE(review): still uses the legacy IEM_MC_MEM_MAP; newer code in this file
 * uses the IEM_MC_MEM_MAP_XXX variants (r100835 conversion in progress). */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: if invalid-op is masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10982
10983
/** Opcode 0xdd !11/2. (Was mislabelled !11/0; see the iemOp_EscF5 dispatch.)
 * FST m64real: store ST(0) to memory as a 64-bit real (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: if invalid-op is masked, store the real indefinite (-QNaN). */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11017
11018
11019
11020
/** Opcode 0xdd !11/3. (Was mislabelled !11/0; see the iemOp_EscF5 dispatch.)
 * FSTP m64real: store ST(0) to memory as a 64-bit real, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: if invalid-op is masked, store the real indefinite (-QNaN). */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11054
11055
/** Opcode 0xdd !11/4. (Was mislabelled !11/0; see the iemOp_EscF5 dispatch.)
 * FRSTOR: restore the full FPU state from a 94/108-byte memory image;
 * deferred to the C implementation iemCImpl_frstor. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
11072
11073
/** Opcode 0xdd !11/6. (Was mislabelled !11/0; see the iemOp_EscF5 dispatch.)
 * FNSAVE: save the full FPU state to a 94/108-byte memory image;
 * deferred to the C implementation iemCImpl_fnsave. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
11090
/** Opcode 0xdd !11/7. (Was mislabelled !11/0; see the iemOp_EscF5 dispatch.)
 * FNSTSW m16: store the FPU status word to memory (no pending-exception check). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
11114
11115
/** Opcode 0xdd 11/0.
 * FFREE ST(i): mark the register as empty in the tag word; TOP is unchanged. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
       unmodified. */
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11135
11136
/** Opcode 0xdd 11/2. (Was mislabelled 11/1; see the iemOp_EscF5 dispatch.)
 * FST ST(i): copy ST(0) into ST(i) without popping. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Wrap ST0's value as a result with a zero FSW delta and store it in ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11159
11160
/** Opcode 0xdd 11/4. (Was mislabelled 11/3; see the iemOp_EscF5 dispatch.)
 * FUCOM ST(i): unordered compare of ST(0) with ST(i), FSW only, no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
11167
11168
/** Opcode 0xdd 11/5. (Was mislabelled 11/4; see the iemOp_EscF5 dispatch.)
 * FUCOMP ST(i): unordered compare of ST(0) with ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
11175
11176
/**
 * @opcode 0xdd
 *
 * FPU escape 0xdd: register forms are FFREE/FST/FSTP/FUCOM(P); memory forms
 * use 64-bit operands plus FRSTOR/FNSAVE/FNSTSW.
 */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the x87 FOP value (low 3 opcode bits + ModR/M) for FSW/FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
            case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11215
11216
/** Opcode 0xde 11/0. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    /* FADDP ST(i),ST(0): ST(i) = ST(i) + ST(0), then pop. */
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
11223
11224
/** Opcode 0xde 11/1. (Was mislabelled 11/0; see the iemOp_EscF6 dispatch.)
 * FMULP ST(i),ST(0): ST(i) = ST(i) * ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
11231
11232
/** Opcode 0xde 0xd9.
 * FCOMPP: compare ST(0) with ST(1), then pop both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
11239
11240
/** Opcode 0xde 11/4. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    /* FSUBRP ST(i),ST(0): ST(i) = ST(0) - ST(i), then pop. */
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
11247
11248
/** Opcode 0xde 11/5. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    /* FSUBP ST(i),ST(0): ST(i) = ST(i) - ST(0), then pop. */
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
11255
11256
/** Opcode 0xde 11/6. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    /* FDIVRP ST(i),ST(0): ST(i) = ST(0) / ST(i), then pop. */
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
11263
11264
/** Opcode 0xde 11/7. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    /* FDIVP ST(i),ST(0): ST(i) = ST(i) / ST(0), then pop. */
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
11271
11272
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * Raises \#NM / \#MF as appropriate before touching memory; on an empty ST0 it
 * records a stack underflow instead of calling the arithmetic worker.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11308
11309
/** Opcode 0xde !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    /* FIADD m16int: ST(0) = ST(0) + (int16)[mem16]. */
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
11316
11317
/** Opcode 0xde !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    /* FIMUL m16int: ST(0) = ST(0) * (int16)[mem16]. */
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
11324
11325
/** Opcode 0xde !11/2.
 * FICOM m16int: compare ST(0) with a 16-bit signed integer from memory,
 * setting only the FSW condition codes (no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11357
11358
/** Opcode 0xde !11/3.
 * FICOMP m16int: same as FICOM m16int but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11390
11391
/** Opcode 0xde !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    /* FISUB m16int: ST(0) = ST(0) - (int16)[mem16]. */
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
11398
11399
/** Opcode 0xde !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    /* FISUBR m16int: ST(0) = (int16)[mem16] - ST(0) (reversed operands). */
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
11406
11407
/** Opcode 0xde !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    /* FIDIV m16int: ST(0) = ST(0) / (int16)[mem16]. */
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
11414
11415
/** Opcode 0xde !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    /* FIDIVR m16int: ST(0) = (int16)[mem16] / ST(0) (reversed operands). */
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
11422
11423
/**
 * @opcode 0xde
 *
 * FPU escape 0xde: register forms are the pop variants (FADDP etc.) plus
 * FCOMPP at 0xd9; memory forms use a 16-bit signed integer operand.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the x87 FOP value (low 3 opcode bits + ModR/M) for FSW/FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11464
11465
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp:
 * frees ST(i) in the tag word and then increments TOP. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11485
11486
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: copy the FPU status word into AX (no pending-exception check). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11501
11502
/** Opcode 0xdf 11/5.
 * FUCOMIP ST0,ST(i): unordered compare setting EFLAGS, then pop (bit 31 of
 * the last argument is the pop flag).
 * NOTE(review): uses the FCOMI worker (iemAImpl_fcomi_r80_by_r80), same as
 * fcomip below; FUCOMI differs from FCOMI only in QNaN \#IA behavior --
 * confirm the shared worker handles both. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11511
11512
/** Opcode 0xdf 11/6.
 * FCOMIP ST0,ST(i): ordered compare setting EFLAGS, then pop (bit 31 of the
 * last argument is the pop flag). */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11521
11522
/** Opcode 0xdf !11/0.
 * FILD m16int: convert a 16-bit signed integer from memory to 80-bit real and
 * push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST(7) (the register becoming the new top) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11553
11554
/** Opcode 0xdf !11/1.
 * FISTTP m16int: store ST(0) to memory as int16 with truncation, then pop.
 * NOTE(review): still uses the legacy IEM_MC_MEM_MAP; newer code in this file
 * uses the IEM_MC_MEM_MAP_XXX variants (r100835 conversion in progress). */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: if invalid-op is masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11588
11589
/** Opcode 0xdf !11/2.
 * FIST m16int: store ST(0) to memory as int16, rounding per FCW RC (no pop). */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: if invalid-op is masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11623
11624
/** Opcode 0xdf !11/3.
 * FISTP m16i - store ST(0) to memory as a 16-bit signed integer, then pop
 * the FPU stack (same as FIST m16i except the _THEN_POP status updates). */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so the store cannot fault after
       the FPU state has been modified. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: store the integer indefinite value when the
           invalid-operation exception is masked (FCW.IM set). */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11658
11659
/** Opcode 0xdf !11/4.
 * FBLD m80bcd - load an 80-bit packed BCD value from memory, convert it to
 * an 80-bit real and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the source before touching the FPU state so a #PF leaves it intact. */
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7), i.e. the register that becomes the new top, must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11690
11691
/** Opcode 0xdf !11/5.
 * FILD m64i - load a 64-bit signed integer from memory, convert it to an
 * 80-bit real and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the source before touching the FPU state so a #PF leaves it intact. */
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7), i.e. the register that becomes the new top, must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11722
11723
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd - store ST(0) to memory as an 80-bit packed BCD value, then
 * pop the FPU stack. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* The 10-byte BCD destination uses the extended map macro with an explicit
       size and alignment specification; mapped writable up front so the store
       cannot fault after the FPU state has been modified. */
    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: store the BCD indefinite value when the
           invalid-operation exception is masked (FCW.IM set). */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11757
11758
/** Opcode 0xdf !11/7.
 * FISTP m64i - store ST(0) to memory as a 64-bit signed integer, then pop
 * the FPU stack. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so the store cannot fault after
       the FPU state has been modified. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: store the integer indefinite value when the
           invalid-operation exception is masked (FCW.IM set). */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11792
11793
11794/**
11795 * @opcode 0xdf
11796 */
11797FNIEMOP_DEF(iemOp_EscF7)
11798{
11799 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11800 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
11801 if (IEM_IS_MODRM_REG_MODE(bRm))
11802 {
11803 switch (IEM_GET_MODRM_REG_8(bRm))
11804 {
11805 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
11806 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
11807 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
11808 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
11809 case 4: if (bRm == 0xe0)
11810 return FNIEMOP_CALL(iemOp_fnstsw_ax);
11811 IEMOP_RAISE_INVALID_OPCODE_RET();
11812 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
11813 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
11814 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11815 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11816 }
11817 }
11818 else
11819 {
11820 switch (IEM_GET_MODRM_REG_8(bRm))
11821 {
11822 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
11823 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
11824 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
11825 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
11826 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
11827 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
11828 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
11829 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
11830 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11831 }
11832 }
11833}
11834
11835
11836/**
11837 * @opcode 0xe0
11838 */
11839FNIEMOP_DEF(iemOp_loopne_Jb)
11840{
11841 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
11842 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
11843 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11844
11845 switch (pVCpu->iem.s.enmEffAddrMode)
11846 {
11847 case IEMMODE_16BIT:
11848 IEM_MC_BEGIN(0,0);
11849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11850 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
11851 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11852 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11853 } IEM_MC_ELSE() {
11854 IEM_MC_ADVANCE_RIP_AND_FINISH();
11855 } IEM_MC_ENDIF();
11856 IEM_MC_END();
11857 break;
11858
11859 case IEMMODE_32BIT:
11860 IEM_MC_BEGIN(0,0);
11861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11862 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
11863 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11864 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11865 } IEM_MC_ELSE() {
11866 IEM_MC_ADVANCE_RIP_AND_FINISH();
11867 } IEM_MC_ENDIF();
11868 IEM_MC_END();
11869 break;
11870
11871 case IEMMODE_64BIT:
11872 IEM_MC_BEGIN(0,0);
11873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11874 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
11875 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11876 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11877 } IEM_MC_ELSE() {
11878 IEM_MC_ADVANCE_RIP_AND_FINISH();
11879 } IEM_MC_ENDIF();
11880 IEM_MC_END();
11881 break;
11882
11883 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11884 }
11885}
11886
11887
11888/**
11889 * @opcode 0xe1
11890 */
11891FNIEMOP_DEF(iemOp_loope_Jb)
11892{
11893 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
11894 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
11895 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11896
11897 switch (pVCpu->iem.s.enmEffAddrMode)
11898 {
11899 case IEMMODE_16BIT:
11900 IEM_MC_BEGIN(0,0);
11901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11902 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
11903 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
11904 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11905 } IEM_MC_ELSE() {
11906 IEM_MC_ADVANCE_RIP_AND_FINISH();
11907 } IEM_MC_ENDIF();
11908 IEM_MC_END();
11909 break;
11910
11911 case IEMMODE_32BIT:
11912 IEM_MC_BEGIN(0,0);
11913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11914 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
11915 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
11916 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11917 } IEM_MC_ELSE() {
11918 IEM_MC_ADVANCE_RIP_AND_FINISH();
11919 } IEM_MC_ENDIF();
11920 IEM_MC_END();
11921 break;
11922
11923 case IEMMODE_64BIT:
11924 IEM_MC_BEGIN(0,0);
11925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11926 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
11927 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
11928 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11929 } IEM_MC_ELSE() {
11930 IEM_MC_ADVANCE_RIP_AND_FINISH();
11931 } IEM_MC_ENDIF();
11932 IEM_MC_END();
11933 break;
11934
11935 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11936 }
11937}
11938
11939
11940/**
11941 * @opcode 0xe2
11942 */
11943FNIEMOP_DEF(iemOp_loop_Jb)
11944{
11945 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
11946 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
11947 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11948
11949 /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
11950 * using the 32-bit operand size override. How can that be restarted? See
11951 * weird pseudo code in intel manual. */
11952
11953 /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
11954 * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
11955 * the loop causes guest crashes, but when logging it's nice to skip a few million
11956 * lines of useless output. */
11957#if defined(LOG_ENABLED)
11958 if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
11959 switch (pVCpu->iem.s.enmEffAddrMode)
11960 {
11961 case IEMMODE_16BIT:
11962 IEM_MC_BEGIN(0,0);
11963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11964 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
11965 IEM_MC_ADVANCE_RIP_AND_FINISH();
11966 IEM_MC_END();
11967 break;
11968
11969 case IEMMODE_32BIT:
11970 IEM_MC_BEGIN(0,0);
11971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11972 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
11973 IEM_MC_ADVANCE_RIP_AND_FINISH();
11974 IEM_MC_END();
11975 break;
11976
11977 case IEMMODE_64BIT:
11978 IEM_MC_BEGIN(0,0);
11979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11980 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
11981 IEM_MC_ADVANCE_RIP_AND_FINISH();
11982 IEM_MC_END();
11983 break;
11984
11985 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11986 }
11987#endif
11988
11989 switch (pVCpu->iem.s.enmEffAddrMode)
11990 {
11991 case IEMMODE_16BIT:
11992 IEM_MC_BEGIN(0,0);
11993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11994 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
11995 IEM_MC_IF_CX_IS_NZ() {
11996 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11997 } IEM_MC_ELSE() {
11998 IEM_MC_ADVANCE_RIP_AND_FINISH();
11999 } IEM_MC_ENDIF();
12000 IEM_MC_END();
12001 break;
12002
12003 case IEMMODE_32BIT:
12004 IEM_MC_BEGIN(0,0);
12005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12006 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
12007 IEM_MC_IF_ECX_IS_NZ() {
12008 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12009 } IEM_MC_ELSE() {
12010 IEM_MC_ADVANCE_RIP_AND_FINISH();
12011 } IEM_MC_ENDIF();
12012 IEM_MC_END();
12013 break;
12014
12015 case IEMMODE_64BIT:
12016 IEM_MC_BEGIN(0,0);
12017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12018 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
12019 IEM_MC_IF_RCX_IS_NZ() {
12020 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12021 } IEM_MC_ELSE() {
12022 IEM_MC_ADVANCE_RIP_AND_FINISH();
12023 } IEM_MC_ENDIF();
12024 IEM_MC_END();
12025 break;
12026
12027 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12028 }
12029}
12030
12031
12032/**
12033 * @opcode 0xe3
12034 */
12035FNIEMOP_DEF(iemOp_jecxz_Jb)
12036{
12037 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
12038 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12039 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12040
12041 switch (pVCpu->iem.s.enmEffAddrMode)
12042 {
12043 case IEMMODE_16BIT:
12044 IEM_MC_BEGIN(0,0);
12045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12046 IEM_MC_IF_CX_IS_NZ() {
12047 IEM_MC_ADVANCE_RIP_AND_FINISH();
12048 } IEM_MC_ELSE() {
12049 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12050 } IEM_MC_ENDIF();
12051 IEM_MC_END();
12052 break;
12053
12054 case IEMMODE_32BIT:
12055 IEM_MC_BEGIN(0,0);
12056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12057 IEM_MC_IF_ECX_IS_NZ() {
12058 IEM_MC_ADVANCE_RIP_AND_FINISH();
12059 } IEM_MC_ELSE() {
12060 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12061 } IEM_MC_ENDIF();
12062 IEM_MC_END();
12063 break;
12064
12065 case IEMMODE_64BIT:
12066 IEM_MC_BEGIN(0,0);
12067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12068 IEM_MC_IF_RCX_IS_NZ() {
12069 IEM_MC_ADVANCE_RIP_AND_FINISH();
12070 } IEM_MC_ELSE() {
12071 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12072 } IEM_MC_ENDIF();
12073 IEM_MC_END();
12074 break;
12075
12076 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12077 }
12078}
12079
12080
/** Opcode 0xe4.
 * IN AL,Ib - read one byte from the immediate I/O port into AL.  Deferred
 * wholesale to the C implementation (may VM-exit / touch I/O ports); the
 * 0x80 bit marks the immediate-port form alongside the address mode. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12090
12091
/** Opcode 0xe5.
 * IN eAX,Ib - read a word/dword (per effective operand size) from the
 * immediate I/O port into AX/EAX.  Deferred to the C implementation. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12102
12103
/** Opcode 0xe6.
 * OUT Ib,AL - write AL to the immediate I/O port.  Deferred to the C
 * implementation (may VM-exit / touch I/O ports). */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12113
12114
/** Opcode 0xe7.
 * OUT Ib,eAX - write AX/EAX (per effective operand size) to the immediate
 * I/O port.  Deferred to the C implementation. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12125
12126
12127/**
12128 * @opcode 0xe8
12129 */
12130FNIEMOP_DEF(iemOp_call_Jv)
12131{
12132 IEMOP_MNEMONIC(call_Jv, "call Jv");
12133 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12134 switch (pVCpu->iem.s.enmEffOpSize)
12135 {
12136 case IEMMODE_16BIT:
12137 {
12138 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12139 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_16, (int16_t)u16Imm);
12140 }
12141
12142 case IEMMODE_32BIT:
12143 {
12144 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12145 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_32, (int32_t)u32Imm);
12146 }
12147
12148 case IEMMODE_64BIT:
12149 {
12150 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
12151 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_64, u64Imm);
12152 }
12153
12154 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12155 }
12156}
12157
12158
12159/**
12160 * @opcode 0xe9
12161 */
12162FNIEMOP_DEF(iemOp_jmp_Jv)
12163{
12164 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
12165 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12166 switch (pVCpu->iem.s.enmEffOpSize)
12167 {
12168 case IEMMODE_16BIT:
12169 {
12170 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
12171 IEM_MC_BEGIN(0, 0);
12172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12173 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
12174 IEM_MC_END();
12175 break;
12176 }
12177
12178 case IEMMODE_64BIT:
12179 case IEMMODE_32BIT:
12180 {
12181 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
12182 IEM_MC_BEGIN(0, 0);
12183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12184 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
12185 IEM_MC_END();
12186 break;
12187 }
12188
12189 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12190 }
12191}
12192
12193
12194/**
12195 * @opcode 0xea
12196 */
12197FNIEMOP_DEF(iemOp_jmp_Ap)
12198{
12199 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
12200 IEMOP_HLP_NO_64BIT();
12201
12202 /* Decode the far pointer address and pass it on to the far call C implementation. */
12203 uint32_t off32Seg;
12204 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
12205 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
12206 else
12207 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
12208 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
12209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12210 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
12211 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
12212 iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
12213}
12214
12215
12216/**
12217 * @opcode 0xeb
12218 */
12219FNIEMOP_DEF(iemOp_jmp_Jb)
12220{
12221 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
12222 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12223 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12224
12225 IEM_MC_BEGIN(0, 0);
12226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12227 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12228 IEM_MC_END();
12229}
12230
12231
/** Opcode 0xec.
 * IN AL,DX - read one byte from the I/O port in DX into AL.  Deferred to
 * the C implementation (may VM-exit / touch I/O ports). */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12240
12241
/** Opcode 0xed.
 * IN eAX,DX - read a word/dword (per effective operand size) from the I/O
 * port in DX into AX/EAX.  Deferred to the C implementation. */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12251
12252
/** Opcode 0xee.
 * OUT DX,AL - write AL to the I/O port in DX.  Deferred to the C
 * implementation (may VM-exit / touch I/O ports). */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12261
12262
/** Opcode 0xef.
 * OUT DX,eAX - write AX/EAX (per effective operand size) to the I/O port in
 * DX.  Deferred to the C implementation. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12272
12273
12274/**
12275 * @opcode 0xf0
12276 */
12277FNIEMOP_DEF(iemOp_lock)
12278{
12279 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
12280 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12281 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
12282
12283 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12284 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12285}
12286
12287
12288/**
12289 * @opcode 0xf1
12290 */
12291FNIEMOP_DEF(iemOp_int1)
12292{
12293 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
12294 /** @todo Does not generate \#UD on 286, or so they say... Was allegedly a
12295 * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
12296 * LOADALL memo. Needs some testing. */
12297 IEMOP_HLP_MIN_386();
12298 /** @todo testcase! */
12299 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
12300 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
12301 iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
12302}
12303
12304
12305/**
12306 * @opcode 0xf2
12307 */
12308FNIEMOP_DEF(iemOp_repne)
12309{
12310 /* This overrides any previous REPE prefix. */
12311 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
12312 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
12313 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
12314
12315 /* For the 4 entry opcode tables, REPNZ overrides any previous
12316 REPZ and operand size prefixes. */
12317 pVCpu->iem.s.idxPrefix = 3;
12318
12319 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12320 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12321}
12322
12323
12324/**
12325 * @opcode 0xf3
12326 */
12327FNIEMOP_DEF(iemOp_repe)
12328{
12329 /* This overrides any previous REPNE prefix. */
12330 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
12331 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
12332 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
12333
12334 /* For the 4 entry opcode tables, REPNZ overrides any previous
12335 REPNZ and operand size prefixes. */
12336 pVCpu->iem.s.idxPrefix = 2;
12337
12338 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12339 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12340}
12341
12342
12343/**
12344 * @opcode 0xf4
12345 */
12346FNIEMOP_DEF(iemOp_hlt)
12347{
12348 IEMOP_MNEMONIC(hlt, "hlt");
12349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12350 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, iemCImpl_hlt);
12351}
12352
12353
12354/**
12355 * @opcode 0xf5
12356 */
12357FNIEMOP_DEF(iemOp_cmc)
12358{
12359 IEMOP_MNEMONIC(cmc, "cmc");
12360 IEM_MC_BEGIN(0, 0);
12361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12362 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
12363 IEM_MC_ADVANCE_RIP_AND_FINISH();
12364 IEM_MC_END();
12365}
12366
12367
12368/**
12369 * Body for of 'inc/dec/not/neg Eb'.
12370 */
12371#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
12372 if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
12373 { \
12374 /* register access */ \
12375 IEM_MC_BEGIN(2, 0); \
12376 IEMOP_HLP_DONE_DECODING(); \
12377 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12378 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
12379 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
12380 IEM_MC_REF_EFLAGS(pEFlags); \
12381 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
12382 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12383 IEM_MC_END(); \
12384 } \
12385 else \
12386 { \
12387 /* memory access. */ \
12388 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
12389 { \
12390 IEM_MC_BEGIN(2, 2); \
12391 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12392 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
12393 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12394 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12395 \
12396 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
12397 IEMOP_HLP_DONE_DECODING(); \
12398 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12399 IEM_MC_FETCH_EFLAGS(EFlags); \
12400 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
12401 \
12402 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
12403 IEM_MC_COMMIT_EFLAGS(EFlags); \
12404 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12405 IEM_MC_END(); \
12406 } \
12407 else \
12408 { \
12409 IEM_MC_BEGIN(2, 2); \
12410 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12411 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
12412 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12413 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12414 \
12415 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
12416 IEMOP_HLP_DONE_DECODING(); \
12417 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12418 IEM_MC_FETCH_EFLAGS(EFlags); \
12419 IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
12420 \
12421 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
12422 IEM_MC_COMMIT_EFLAGS(EFlags); \
12423 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12424 IEM_MC_END(); \
12425 } \
12426 } \
12427 (void)0
12428
12429
12430/**
12431 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
12432 */
12433#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
12434 if (IEM_IS_MODRM_REG_MODE(bRm)) \
12435 { \
12436 /* \
12437 * Register target \
12438 */ \
12439 switch (pVCpu->iem.s.enmEffOpSize) \
12440 { \
12441 case IEMMODE_16BIT: \
12442 IEM_MC_BEGIN(2, 0); \
12443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12444 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
12445 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
12446 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
12447 IEM_MC_REF_EFLAGS(pEFlags); \
12448 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
12449 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12450 IEM_MC_END(); \
12451 break; \
12452 \
12453 case IEMMODE_32BIT: \
12454 IEM_MC_BEGIN(2, 0); \
12455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12456 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
12457 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
12458 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
12459 IEM_MC_REF_EFLAGS(pEFlags); \
12460 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
12461 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
12462 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12463 IEM_MC_END(); \
12464 break; \
12465 \
12466 case IEMMODE_64BIT: \
12467 IEM_MC_BEGIN(2, 0); \
12468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12469 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
12470 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
12471 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
12472 IEM_MC_REF_EFLAGS(pEFlags); \
12473 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
12474 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12475 IEM_MC_END(); \
12476 break; \
12477 \
12478 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
12479 } \
12480 } \
12481 else \
12482 { \
12483 /* \
12484 * Memory target. \
12485 */ \
12486 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
12487 { \
12488 switch (pVCpu->iem.s.enmEffOpSize) \
12489 { \
12490 case IEMMODE_16BIT: \
12491 IEM_MC_BEGIN(2, 2); \
12492 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
12493 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
12494 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12495 \
12496 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12498 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
12499 IEM_MC_FETCH_EFLAGS(EFlags); \
12500 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
12501 \
12502 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
12503 IEM_MC_COMMIT_EFLAGS(EFlags); \
12504 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12505 IEM_MC_END(); \
12506 break; \
12507 \
12508 case IEMMODE_32BIT: \
12509 IEM_MC_BEGIN(2, 2); \
12510 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
12511 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
12512 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12513 \
12514 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12516 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
12517 IEM_MC_FETCH_EFLAGS(EFlags); \
12518 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
12519 \
12520 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
12521 IEM_MC_COMMIT_EFLAGS(EFlags); \
12522 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12523 IEM_MC_END(); \
12524 break; \
12525 \
12526 case IEMMODE_64BIT: \
12527 IEM_MC_BEGIN(2, 2); \
12528 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
12529 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
12530 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12531 \
12532 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12534 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
12535 IEM_MC_FETCH_EFLAGS(EFlags); \
12536 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
12537 \
12538 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
12539 IEM_MC_COMMIT_EFLAGS(EFlags); \
12540 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12541 IEM_MC_END(); \
12542 break; \
12543 \
12544 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
12545 } \
12546 } \
12547 else \
12548 { \
12549 (void)0
12550
/**
 * Second half of IEMOP_BODY_UNARY_Ev: supplies the LOCK-prefixed memory
 * variants for all three operand sizes and closes the scopes left open by
 * the first macro.  Must always directly follow IEMOP_BODY_UNARY_Ev.
 *
 * @param   a_fnLockedU16   Assembly worker for the locked 16-bit operation.
 * @param   a_fnLockedU32   Assembly worker for the locked 32-bit operation.
 * @param   a_fnLockedU64   Assembly worker for the locked 64-bit operation.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
12613
12614
/**
 * @opmaps grp3_f6
 * @opcode /0
 * @todo also /1
 *
 * TEST Eb,Ib - ANDs the r/m8 operand with an imm8 and updates EFLAGS without
 * writing back the result, hence the destination is only mapped read-only.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,      1);
        IEM_MC_ARG(uint32_t *,      pEFlags,                2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t const *, pu8Dst,                 0);
        IEM_MC_ARG(uint8_t,         u8Src,                  1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,       bUnmapInfo);

        /* cbImm=1: the effective address is followed by a 1 byte immediate. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12664
12665
/**
 * Opcode 0xf6 /4, /5, /6 and /7 - common body for MUL/IMUL/DIV/IDIV Eb.
 *
 * All four share the PFNIEMAIMPLMULDIVU8 worker signature: AX (by reference)
 * is the implicit destination, the r/m8 value the explicit operand.  The
 * worker returns zero on success; a non-zero return requests a \#DE.
 *
 * @param   bRm     The ModR/M byte.
 * @param   pfnU8   The 8-bit multiply/divide worker to invoke.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
12716
12717
/**
 * Opcode 0xf7 /4, /5, /6 and /7 - common body for MUL/IMUL/DIV/IDIV Ev.
 *
 * The size-specific workers take the xAX and xDX register pair by reference
 * as implicit operands plus the r/m value.  A zero return means success; a
 * non-zero return requests a \#DE (divide error).
 *
 * @param   bRm     The ModR/M byte.
 * @param   pImpl   Table with the 16/32/64-bit workers for the operation.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit GPR writes zero-extend; the workers only touch the
                       low dword via the references, so clear the high halves here. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* See the register variant: zero-extend EAX/EDX writes. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12900
12901
/**
 * @opmaps grp3_f6
 * @opcode /2
 *
 * NOT Eb - one's complement of the r/m8 operand; expands to the common
 * unary Eb body with the plain and LOCKed 8-bit workers.
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Eb, "not Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
}
12911
12912
12913/**
12914 * @opmaps grp3_f6
12915 * @opcode /3
12916 */
12917FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
12918{
12919 IEMOP_MNEMONIC(net_Eb, "neg Eb");
12920 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
12921}
12922
12923
/**
 * @opcode 0xf6
 *
 * Group 3 dispatcher for byte operands; the ModR/M reg field selects the
 * instruction.  /0 and /1 both decode as TEST Eb,Ib here (see the @todo on
 * iemOp_grp3_test_Eb); /4-/7 share the multiply/divide body and only differ
 * in the worker table and the EFLAGS bits left undefined.
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12955
12956
12957/** Opcode 0xf7 /0. */
12958FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
12959{
12960 IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
12961 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
12962
12963 if (IEM_IS_MODRM_REG_MODE(bRm))
12964 {
12965 /* register access */
12966 switch (pVCpu->iem.s.enmEffOpSize)
12967 {
12968 case IEMMODE_16BIT:
12969 {
12970 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12971 IEM_MC_BEGIN(3, 0);
12972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12973 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12974 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
12975 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12976 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12977 IEM_MC_REF_EFLAGS(pEFlags);
12978 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
12979 IEM_MC_ADVANCE_RIP_AND_FINISH();
12980 IEM_MC_END();
12981 break;
12982 }
12983
12984 case IEMMODE_32BIT:
12985 {
12986 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12987 IEM_MC_BEGIN(3, 0);
12988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12989 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12990 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
12991 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12992 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12993 IEM_MC_REF_EFLAGS(pEFlags);
12994 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
12995 /* No clearing the high dword here - test doesn't write back the result. */
12996 IEM_MC_ADVANCE_RIP_AND_FINISH();
12997 IEM_MC_END();
12998 break;
12999 }
13000
13001 case IEMMODE_64BIT:
13002 {
13003 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
13004 IEM_MC_BEGIN(3, 0);
13005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13006 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13007 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
13008 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13009 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
13010 IEM_MC_REF_EFLAGS(pEFlags);
13011 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
13012 IEM_MC_ADVANCE_RIP_AND_FINISH();
13013 IEM_MC_END();
13014 break;
13015 }
13016
13017 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13018 }
13019 }
13020 else
13021 {
13022 /* memory access. */
13023 switch (pVCpu->iem.s.enmEffOpSize)
13024 {
13025 case IEMMODE_16BIT:
13026 {
13027 IEM_MC_BEGIN(3, 2);
13028 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13029 IEM_MC_ARG(uint16_t, u16Src, 1);
13030 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
13031 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13032
13033 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
13034 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13035 IEM_MC_ASSIGN(u16Src, u16Imm);
13036 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13037 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13038 IEM_MC_FETCH_EFLAGS(EFlags);
13039 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
13040
13041 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
13042 IEM_MC_COMMIT_EFLAGS(EFlags);
13043 IEM_MC_ADVANCE_RIP_AND_FINISH();
13044 IEM_MC_END();
13045 break;
13046 }
13047
13048 case IEMMODE_32BIT:
13049 {
13050 IEM_MC_BEGIN(3, 2);
13051 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13052 IEM_MC_ARG(uint32_t, u32Src, 1);
13053 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
13054 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13055
13056 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
13057 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13058 IEM_MC_ASSIGN(u32Src, u32Imm);
13059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13060 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13061 IEM_MC_FETCH_EFLAGS(EFlags);
13062 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
13063
13064 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
13065 IEM_MC_COMMIT_EFLAGS(EFlags);
13066 IEM_MC_ADVANCE_RIP_AND_FINISH();
13067 IEM_MC_END();
13068 break;
13069 }
13070
13071 case IEMMODE_64BIT:
13072 {
13073 IEM_MC_BEGIN(3, 2);
13074 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13075 IEM_MC_ARG(uint64_t, u64Src, 1);
13076 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
13077 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13078
13079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
13080 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
13081 IEM_MC_ASSIGN(u64Src, u64Imm);
13082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13083 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13084 IEM_MC_FETCH_EFLAGS(EFlags);
13085 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
13086
13087 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
13088 IEM_MC_COMMIT_EFLAGS(EFlags);
13089 IEM_MC_ADVANCE_RIP_AND_FINISH();
13090 IEM_MC_END();
13091 break;
13092 }
13093
13094 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13095 }
13096 }
13097}
13098
13099
/** Opcode 0xf7 /2.
 *
 * NOT Ev - the two body macros pair up: the first handles register and
 * non-locked memory forms and opens the LOCKed 'else' branch; the second
 * supplies the interlocked memory forms and closes it.
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_not_u16,        iemAImpl_not_u32,        iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
13107
13108
/** Opcode 0xf7 /3.
 *
 * NEG Ev - see iemOp_grp3_not_Ev for how the plain/LOCKed body macros pair.
 */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_neg_u16,        iemAImpl_neg_u32,        iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
13116
13117
/**
 * @opcode 0xf7
 *
 * Group 3 dispatcher for word/dword/qword operands; the ModR/M reg field
 * selects the instruction.  /0 and /1 both decode as TEST Ev,Iv; /4-/7
 * share the multiply/divide body and differ only in the worker table and
 * the EFLAGS bits left undefined.
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13149
13150
/**
 * @opcode 0xf8
 *
 * CLC - clears EFLAGS.CF; no other state is touched.
 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13163
13164
/**
 * @opcode 0xf9
 *
 * STC - sets EFLAGS.CF; no other state is touched.
 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13177
13178
/**
 * @opcode 0xfa
 *
 * CLI - deferred to the C implementation; the flags declare that it changes
 * RFLAGS, may cause a VM-exit, and that pending interrupts must be checked
 * before this instruction executes (IEM_CIMPL_F_CHECK_IRQ_BEFORE).
 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, iemCImpl_cli);
}
13188
13189
/**
 * @opcode 0xfb
 *
 * STI - deferred to the C implementation; changes RFLAGS and may VM-exit.
 * Unlike CLI, interrupts are only checked AFTER the following instruction
 * (IEM_CIMPL_F_CHECK_IRQ_AFTER).
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER | IEM_CIMPL_F_VMEXIT, iemCImpl_sti);
}
13196
13197
/**
 * @opcode 0xfc
 *
 * CLD - clears EFLAGS.DF; no other state is touched.
 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13210
13211
/**
 * @opcode 0xfd
 *
 * STD - sets EFLAGS.DF; no other state is touched.
 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13224
13225
/**
 * @opmaps grp4
 * @opcode /0
 *
 * INC Eb - expands to the common unary Eb body with the plain and LOCKed
 * 8-bit increment workers.
 */
FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Eb, "inc Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
}
13235
13236
/**
 * @opmaps grp4
 * @opcode /1
 *
 * DEC Eb - expands to the common unary Eb body with the plain and LOCKed
 * 8-bit decrement workers.
 */
FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Eb, "dec Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
}
13246
13247
/**
 * @opcode 0xfe
 *
 * Group 4 dispatcher: only /0 (INC Eb) and /1 (DEC Eb) are defined;
 * all other reg-field encodings raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
        default:
            /** @todo is the eff-addr decoded? */
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}
13264
/** Opcode 0xff /0.
 *
 * INC Ev - see iemOp_grp3_not_Ev for how the plain/LOCKed body macros pair.
 */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_inc_u16,        iemAImpl_inc_u32,        iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
13272
13273
/** Opcode 0xff /1.
 *
 * DEC Ev - see iemOp_grp3_not_Ev for how the plain/LOCKed body macros pair.
 */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_dec_u16,        iemAImpl_dec_u32,        iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
13281
13282
/**
 * Opcode 0xff /2 - CALL near indirect.
 *
 * The target is read from a register or memory operand and handed to the
 * size-specific iemCImpl_call_XX worker, which pushes the return address and
 * performs the branch.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t,  u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t,  u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t,  u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13369
/**
 * Body for the far indirect CALL/JMP forms (0xff /3 and /5): the far pointer
 * (16-bit selector + 16/32/64-bit offset) is loaded from memory and handed to
 * the given C worker.  A register operand has no far-pointer encoding, so
 * register mode raises \#UD.
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint16_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_16BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint32_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_32BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint64_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_64BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE /* no gates */, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
13437
13438
/**
 * Opcode 0xff /3 - CALLF Ep (far indirect call via memory far pointer).
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf);
}
13448
13449
/**
 * Opcode 0xff /4 - JMP near indirect.
 *
 * Unlike CALL (0xff /2), no return address is pushed, so RIP can be set
 * directly via IEM_MC_SET_RIP_UXX_AND_FINISH without a C worker.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13536
13537
/**
 * Opcode 0xff /5 - JMPF Ep (far indirect jump via memory far pointer).
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp);
}
13547
13548
/**
 * Opcode 0xff /6 - PUSH Ev.
 *
 * Register operands are delegated to the common push-GReg worker; the memory
 * forms fetch the value here and push it with the size-specific MC.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t,  u16Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t,  u32Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t,  u64Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13604
13605
13606/**
13607 * @opcode 0xff
13608 */
13609FNIEMOP_DEF(iemOp_Grp5)
13610{
13611 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13612 switch (IEM_GET_MODRM_REG_8(bRm))
13613 {
13614 case 0:
13615 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
13616 case 1:
13617 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
13618 case 2:
13619 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
13620 case 3:
13621 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
13622 case 4:
13623 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
13624 case 5:
13625 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
13626 case 6:
13627 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
13628 case 7:
13629 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
13630 IEMOP_RAISE_INVALID_OPCODE_RET();
13631 }
13632 AssertFailedReturn(VERR_IEM_IPE_3);
13633}
13634
13635
13636
/**
 * The one byte opcode dispatch table, indexed by the opcode byte (0x00..0xff).
 *
 * Each entry is the FNIEMOP decode/emulation worker for that opcode; prefix
 * bytes (seg/lock/rep/op-size/addr-size), escape bytes (0x0f, 0xd8..0xdf) and
 * group dispatchers (Grp1..Grp5, Grp11) are routed through workers too.
 * Declared extern near the top of this file.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
 /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
 /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
 /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
 /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
 /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
 /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
 /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
 /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
 /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
 /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
 /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
 /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
 /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
 /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
 /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
 /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
 /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
 /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
 /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
 /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
 /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
 /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
 /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
 /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
 /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
 /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
 /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
 /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
 /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
 /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
 /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
 /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
 /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
 /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
 /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
 /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
 /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
 /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
 /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
 /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
 /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
 /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
 /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
 /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
 /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
 /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
 /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
 /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
 /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
 /* 0xc4 */ iemOp_les_Gv_Mp__vex3, iemOp_lds_Gv_Mp__vex2, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
 /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
 /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
 /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
 /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
 /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
 /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
 /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
 /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
 /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
 /* 0xec */ iemOp_in_AL_DX, iemOp_in_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
 /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
 /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
 /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
 /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
13704
13705
13706/** @} */
13707
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette